diff --git a/.cargo-husky/hooks/pre-commit b/.cargo-husky/hooks/pre-commit
new file mode 100755
index 00000000..9ffe923d
--- /dev/null
+++ b/.cargo-husky/hooks/pre-commit
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+set -e
+
+echo '+cargo fmt --check'
+cargo fmt --check || (cargo fmt && exit 1)
diff --git a/.cargo-husky/hooks/pre-push b/.cargo-husky/hooks/pre-push
new file mode 100755
index 00000000..9181ab3b
--- /dev/null
+++ b/.cargo-husky/hooks/pre-push
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+set -e
+
+echo '+cargo fmt --check'
+cargo fmt --check || (cargo fmt && exit 1)
+
+echo "unstaged changes"
+echo 'git diff-index --quiet HEAD --'
+git diff-index --quiet HEAD --
+
+echo '+cargo clippy --all -- -Dwarnings'
+cargo clippy --all -- -Dwarnings
+
+echo '+cargo test --all'
+cargo build
+cargo test --all || (echo "might need to rebuild: make build-snapshot" && exit 1)
+
+echo '+cargo run --bin doc-gen --features clap-markdown'
+cargo run --bin doc-gen --features clap-markdown
diff --git a/.cargo/config.toml b/.cargo/config.toml
new file mode 100644
index 00000000..da0b5d5f
--- /dev/null
+++ b/.cargo/config.toml
@@ -0,0 +1,160 @@
+# paths = ["/path/to/override"] # path dependency overrides
+
+[alias] # command aliases
+md-gen = "run --bin doc-gen --features clap-markdown"
+f = "fmt"
+# b = "build"
+# c = "check"
+# t = "test"
+# r = "run"
+# rr = "run --release"
+# recursive_example = "rr --example recursions"
+# space_example = ["run", "--release", "--", "\"command list\""]
+
+[build]
+# jobs = 1 # number of parallel jobs, defaults to # of CPUs
+# rustc = "rustc" # the rust compiler tool
+# rustc-wrapper = "…" # run this wrapper instead of `rustc`
+# rustc-workspace-wrapper = "…" # run this wrapper instead of `rustc` for workspace members
+# rustdoc = "rustdoc" # the doc generator tool
+# target = "triple" # build for the target triple (ignored by `cargo install`)
+# target-dir = "target" # path of where to place all generated artifacts
+rustflags = [
+    "-Wclippy::pedantic",
+    "-Aclippy::needless_pass_by_value",
+    "-Aclippy::must_use_candidate",
+    "-Aclippy::missing_panics_doc",
+    "-Aclippy::missing_errors_doc",
+    # "-Aclippy::missing_safety_doc",
+    # "-Aclippy::inline_always",
+    # "-Aclippy::default_trait_access",
+    # "-Aclippy::module_name_repetitions",
+    # "-Aclippy::module_name_repetitions",
+    # "-Aclippy::too_many_lines",
+    # "-Aclippy::cast_possible_truncation",
+    # "-Aclippy::cast_sign_loss",
+    # "-Aclippy::cast_possible_wrap",
+    # "-Aclippy::similar_names",
+    # "-Aclippy::doc_markdown",
+    # "-Aclippy::struct_excessive_bools",
+    # "-Aclippy::cast_lossless",
+    # "-Aclippy::trivially_copy_pass_by_ref",
+    # "-Aclippy::wrong_self_convention",
+    # "-Aclippy::unused_self",
+    # "-Aclippy::enum_glob_use",
+    # "-Aclippy::return_self_not_must_use",
+    # "-Aclippy::map_entry",
+    # "-Aclippy::match_same_arms",
+    # "-Aclippy::iter_not_returning_iterator",
+    # "-Aclippy::unnecessary_wraps",
+    # "-Aclippy::type_complexity",
+] # custom flags to pass to all compiler invocations
+# rustdocflags = ["…", "…"] # custom flags to pass to rustdoc
+# incremental = true # whether or not to enable incremental compilation
+# dep-info-basedir = "…" # path for the base directory for targets in depfiles
+
+# [doc]
+# browser = "chromium" # browser to use with `cargo doc --open`,
+# # overrides the `BROWSER` environment variable
+
+# [env]
+# # Set ENV_VAR_NAME=value for any process run by Cargo
+# ENV_VAR_NAME = "value"
+# # Set even if already present in environment
+# ENV_VAR_NAME_2 = { value = "value", force = true }
+# # Value is relative to .cargo directory containing `config.toml`, make absolute
+# ENV_VAR_NAME_3 = { value = "relative/path", relative = true }
+
+# [future-incompat-report]
+# frequency = 'always' # when to display a notification about a future incompat report
+
+# [cargo-new]
+# vcs = "none" # VCS to use ('git', 'hg', 'pijul', 'fossil', 'none')
+
+# [http]
+# debug = false # HTTP debugging
+# proxy = "host:port" # HTTP proxy in libcurl format
+# ssl-version = "tlsv1.3" # TLS version to use
+# ssl-version.max = "tlsv1.3" # maximum TLS version
+# ssl-version.min = "tlsv1.1" # minimum TLS version
+# timeout = 30 # timeout for each HTTP request, in seconds
+# low-speed-limit = 10 # network timeout threshold (bytes/sec)
+# cainfo = "cert.pem" # path to Certificate Authority (CA) bundle
+# check-revoke = true # check for SSL certificate revocation
+# multiplexing = true # HTTP/2 multiplexing
+# user-agent = "…" # the user-agent header
+
+# [install]
+# root = "/some/path" # `cargo install` destination directory
+
+# [net]
+# retry = 2 # network retries
+# git-fetch-with-cli = true # use the `git` executable for git operations
+# offline = true # do not access the network
+
+# [net.ssh]
+# known-hosts = ["..."] # known SSH host keys
+
+# [patch.<registry>]
+# # Same keys as for [patch] in Cargo.toml
+
+# [profile.<name>] # Modify profile settings via config.
+# inherits = "dev" # Inherits settings from [profile.dev].
+# opt-level = 0 # Optimization level.
+# debug = true # Include debug info.
+# split-debuginfo = '...' # Debug info splitting behavior.
+# debug-assertions = true # Enables debug assertions.
+# overflow-checks = true # Enables runtime integer overflow checks.
+# lto = false # Sets link-time optimization.
+# panic = 'unwind' # The panic strategy.
+# incremental = true # Incremental compilation.
+# codegen-units = 16 # Number of code generation units.
+# rpath = false # Sets the rpath linking option.
+# [profile.<name>.build-override] # Overrides build-script settings.
+# # Same keys for a normal profile.
+# [profile.<name>.package.<name>] # Override profile for a package.
+# # Same keys for a normal profile (minus `panic`, `lto`, and `rpath`).
+
+# [registries.<name>] # registries other than crates.io
+# index = "…" # URL of the registry index
+# token = "…" # authentication token for the registry
+
+# [registry]
+# default = "…" # name of the default registry
+# token = "…" # authentication token for crates.io
+
+# [source.<name>] # source definition and replacement
+# replace-with = "…" # replace this source with the given named source
+# directory = "…" # path to a directory source
+# registry = "…" # URL to a registry source
+# local-registry = "…" # path to a local registry source
+# git = "…" # URL of a git repository source
+# branch = "…" # branch name for the git repository
+# tag = "…" # tag name for the git repository
+# rev = "…" # revision for the git repository
+
+# [target.<triple>]
+# linker = "…" # linker to use
+# runner = "…" # wrapper to run executables
+# rustflags = ["…", "…"] # custom flags for `rustc`
+
+# [target.<cfg>]
+# runner = "…" # wrapper to run executables
+# rustflags = ["…", "…"] # custom flags for `rustc`
+
+# [target.<triple>.<links>] # `links` build script override
+# rustc-link-lib = ["foo"]
+# rustc-link-search = ["/path/to/foo"]
+# rustc-flags = ["-L", "/some/path"]
+# rustc-cfg = ['key="value"']
+# rustc-env = {key = "value"}
+# rustc-cdylib-link-arg = ["…"]
+# metadata_key1 = "value"
+# metadata_key2 = "value"
+
+# [term]
+# quiet = false # whether cargo output is quiet
+# verbose = false # whether cargo provides verbose output
+# color = 'auto' # whether cargo colorizes output
+# progress.when = 'auto' # whether cargo shows progress bar
+# progress.width = 80 # width of progress bar
diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..7af275d4
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,2 @@
+target/
+.soroban/
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..39fc29cb
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,2 @@
+Cargo.lock text -merge eol=lf
+cmd/crates/soroban-spec-typescript/fixtures/**/* linguist-generated=true -diff
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..29cc9381
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,3 @@
+# Involve the contract committer team when making changes to the soroban
+# contract build command.
+/cmd/soroban-cli/src/commands/contract/build.rs @stellar/contract-committers
diff --git a/.github/actions/setup-go/action.yml b/.github/actions/setup-go/action.yml
new file mode 100644
index 00000000..1bfd4a41
--- /dev/null
+++ b/.github/actions/setup-go/action.yml
@@ -0,0 +1,55 @@
+name: 'Setup the Go environment'
+description: 'Installs go and restores/saves the build/module cache'
+inputs:
+  go-version:
+    required: true
+runs:
+  using: "composite"
+  steps:
+    - name: Set up Go
+      uses: actions/setup-go@v2
+      with:
+        go-version: ${{ inputs.go-version }}
+        stable: ${{ !(contains(inputs.go-version, 'beta') || contains(inputs.go-version, 'rc')) }}
+
+    # Restore original modification time of files based on the date of the most
+    # recent commit that modified them as mtimes affect the Go test cache.
+    - name: Restore modification time of checkout files
+      shell: bash
+      run: |
+        # Set a base, fixed modification time of all directories.
+        # git-restore-mtime doesn't set the mtime of all directories.
+        # (see https://github.com/MestreLion/git-tools/issues/47 for details)
+        touch -m -t '201509301646' $(find . -type d -not -path '.git/*')
+        # Restore original modification time from git. git clone sets the
+        # modification time to the current time, but Go tests that access fixtures
+        # get invalidated if their modification times change.
+        sudo apt-get install -y git-restore-mtime
+        git restore-mtime
+
+    # The PREFIX must uniquely identify the specific instance of a job executing.
+    - shell: bash
+      run: echo 'PREFIX=${{ github.workflow }}-${{ github.job }}-${{ runner.os }}-${{ inputs.go-version }}-matrix(${{ join(matrix.*,'|') }})' >> $GITHUB_ENV
+
+    # Cache the Go Modules downloaded during the job.
+    - uses: actions/cache@v2
+      with:
+        path: ~/go/pkg/mod
+        key: ${{ env.PREFIX }}-go-mod-${{ hashFiles('**/go.sum') }}
+        restore-keys: ${{ env.PREFIX }}-go-mod-
+
+    # Cache any build and test artifacts during the job, which will speed up
+    # rebuilds and cause test runs to skip tests that have no reason to rerun.
+ - uses: actions/cache@v2 + with: + path: ~/.cache/go-build + key: ${{ env.PREFIX }}-go-build-${{ github.ref }}-${{ hashFiles('**', '!.git') }} + restore-keys: | + ${{ env.PREFIX }}-go-build-${{ github.ref }}- + ${{ env.PREFIX }}-go-build- + + # Reset the cache for master/protected branches, to ensure they build and run the tests from zero + # and that the module cache is cleaned (otherwise it accumulates orphan dependencies over time). + - if: github.ref_protected + shell: bash + run: sudo rm -rf ~/.cache/go-build ~/go/pkg/mod diff --git a/.github/actions/setup-integration-tests/action.yml b/.github/actions/setup-integration-tests/action.yml new file mode 100644 index 00000000..938acb6d --- /dev/null +++ b/.github/actions/setup-integration-tests/action.yml @@ -0,0 +1,61 @@ +name: 'Set up integration tests' +description: 'Set up Go & Rust, build artifacts, work around cache issues and Ubuntu quirks' +inputs: + go-version: + required: true +runs: + using: "composite" + steps: + - uses: ./.github/actions/setup-go + with: + go-version: ${{ matrix.go }} + - uses: stellar/actions/rust-cache@main + - name: Build soroban contract fixtures + shell: bash + run: | + rustup update + rustup target add wasm32-unknown-unknown + make build_rust + make build-test-wasms + + - name: Install Captive Core + shell: bash + run: | + # Workaround for https://github.com/actions/virtual-environments/issues/5245, + # libc++1-8 won't be installed if another version is installed (but apt won't give you a helpful + # message about why the installation fails) + sudo apt-get remove -y libc++1-10 libc++abi1-10 || true + + sudo wget -qO - https://apt.stellar.org/SDF.asc | APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=true sudo apt-key add - + sudo bash -c 'echo "deb https://apt.stellar.org focal unstable" > /etc/apt/sources.list.d/SDF-unstable.list' + sudo apt-get update && sudo apt-get install -y stellar-core="$PROTOCOL_20_CORE_DEBIAN_PKG_VERSION" + echo "Using stellar core version $(stellar-core version)" + + # Docker-compose's remote contexts on Ubuntu 20 started failing with an OpenSSL versioning error. + # See https://stackoverflow.com/questions/66579446/error-executing-docker-compose-building-webserver-unable-to-prepare-context-un + - name: Work around Docker Compose problem + shell: bash + run: | + sudo apt-get update + sudo apt-get install -y ca-certificates curl gnupg + + # Install docker apt repo + sudo install -m 0755 -d /etc/apt/keyrings + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg + sudo chmod a+r /etc/apt/keyrings/docker.gpg + echo \ + "deb [arch="$(dpkg --print-architecture)" signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \ + "$(. /etc/os-release && echo "$VERSION_CODENAME")" stable" | \ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + + # Install docker-compose v2 from apt repo + sudo apt-get update + sudo apt-get remove -y moby-compose + sudo apt-get install -y docker-compose-plugin + + echo "Docker Compose Version:" + docker-compose version + + - name: Build libpreflight + shell: bash + run: make build-libpreflight diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 00000000..6ff499cf --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,11 @@ +### What + +[TODO: Short statement about what is changing.] + +### Why + +[TODO: Why this change is being made. Include any context required to understand the why.] 
+ +### Known limitations + +[TODO or N/A] diff --git a/.github/workflows/bindings-ts.yml b/.github/workflows/bindings-ts.yml new file mode 100644 index 00000000..62469fd7 --- /dev/null +++ b/.github/workflows/bindings-ts.yml @@ -0,0 +1,43 @@ + +name: bindings typescript + +on: + push: + branches: [main, release/**] + pull_request: + +jobs: + test: + name: test generated libraries + runs-on: ubuntu-22.04 + services: + rpc: + image: stellar/quickstart:soroban-dev@sha256:0ad51035cf7caba2fd99c7c1fad0945df6932be7d5c893e1520ccdef7d6a6ffe + ports: + - 8000:8000 + env: + ENABLE_LOGS: true + NETWORK: local + ENABLE_SOROBAN_RPC: true + options: >- + --health-cmd "curl --no-progress-meter --fail-with-body -X POST \"http://localhost:8000/soroban/rpc\" -H 'Content-Type: application/json' -d '{\"jsonrpc\":\"2.0\",\"id\":8675309,\"method\":\"getNetwork\"}' && curl --no-progress-meter \"http://localhost:8000/friendbot\" | grep '\"invalid_field\": \"addr\"'" + --health-interval 10s + --health-timeout 5s + --health-retries 50 + steps: + - uses: actions/checkout@v3 + - uses: actions/cache@v3 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + - run: rustup update + - run: cargo build + - run: rustup target add wasm32-unknown-unknown + - run: make build-test-wasms + - run: npm ci && npm run test + working-directory: cmd/crates/soroban-spec-typescript/ts-tests diff --git a/.github/workflows/bump-version.yml b/.github/workflows/bump-version.yml new file mode 100644 index 00000000..a79b1912 --- /dev/null +++ b/.github/workflows/bump-version.yml @@ -0,0 +1,15 @@ +name: Bump Version + +on: + workflow_dispatch: + inputs: + version: + description: 'Version to bump to' + required: true + +jobs: + + bump-version: + uses: stellar/actions/.github/workflows/rust-bump-version.yml@main + with: + version: ${{ inputs.version }} diff --git a/.github/workflows/dependency-check.yml b/.github/workflows/dependency-check.yml new file mode 100644 index 00000000..a4ec6585 --- /dev/null +++ b/.github/workflows/dependency-check.yml @@ -0,0 +1,26 @@ +name: Dependency sanity checker + +on: + push: + branches: [main, release/**] + pull_request: + +defaults: + run: + shell: bash + +jobs: + dependency-sanity-checker: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - run: rustup update + - uses: actions/setup-go@v3 + with: + go-version: 1.21 + - run: scripts/check-dependencies.bash + validate-rust-git-rev-deps: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: stellar/actions/rust-check-git-rev-deps@main diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml new file mode 100644 index 00000000..0ca517d0 --- /dev/null +++ b/.github/workflows/e2e.yml @@ -0,0 +1,106 @@ +name: Soroban Tools e2e + +on: + push: + branches: [main, release/**] + pull_request: + +jobs: + integration: + name: System tests + strategy: + matrix: + scenario-filter: ["^TestDappDevelop$/^.*$"] + runs-on: ubuntu-latest-4-cores + env: + # the gh tag of system-test repo version to run + SYSTEM_TEST_GIT_REF: master + + # the soroban tools source code to compile and run from system test + # refers to checked out source of current git hub ref context + SYSTEM_TEST_SOROBAN_TOOLS_REF: ${{ github.workspace }}/soroban-tools + + # core git ref should be latest commit for stable soroban functionality + # the core bin can either be compiled in-line here as part of ci, + SYSTEM_TEST_CORE_GIT_REF: 
https://github.com/stellar/stellar-core.git#v20.1.0 + SYSTEM_TEST_CORE_COMPILE_CONFIGURE_FLAGS: "--disable-tests" + # or set SYSTEM_TEST_CORE_GIT_REF to empty, and set SYSTEM_TEST_CORE_IMAGE + # to pull a pre-compiled image from dockerhub instead + SYSTEM_TEST_CORE_IMAGE: + + # sets the version of rust toolchain that will be pre-installed in the + # test runtime environment, tests invoke rustc/cargo + SYSTEM_TEST_RUST_TOOLCHAIN_VERSION: stable + + # set the version of js-stellar-sdk to use, need to choose one of either + # resolution options, using npm release or a gh ref: + # + # option #1, set the version of stellar-sdk based on a npm release version + SYSTEM_TEST_JS_STELLAR_SDK_NPM_VERSION: 11.1.0 + # option #2, set the version of stellar-sdk used as a ref to a gh repo if + # a value is set on SYSTEM_TEST_JS_STELLAR_SDK_GH_REPO, it takes + # precedence over any SYSTEM_TEST_JS_STELLAR_SDK_NPM_VERSION + SYSTEM_TEST_JS_STELLAR_SDK_GH_REPO: + SYSTEM_TEST_JS_STELLAR_SDK_GH_REF: + + # the version of rs-stellar-xdr to use for quickstart + SYSTEM_TEST_RS_XDR_GIT_REF: v20.0.2 + + # system test will build quickstart image internally to use for running the service stack + # configured in standalone network mode(core, rpc) + SYSTEM_TEST_QUICKSTART_GIT_REF: https://github.com/stellar/quickstart.git#412bb828ddb4a93745227ab5ad97c623d43f3a5f + + # triggers system test to log out details from quickstart's logs and test steps + SYSTEM_TEST_VERBOSE_OUTPUT: "true" + + # the soroban test cases will compile various contracts from the examples repo + SYSTEM_TEST_SOROBAN_EXAMPLES_GIT_HASH: "v20.0.0" + SYSTEM_TEST_SOROBAN_EXAMPLES_GIT_REPO: "https://github.com/stellar/soroban-examples.git" + steps: + - uses: actions/checkout@v3 + name: checkout system-test + with: + repository: stellar/system-test + ref: ${{ env.SYSTEM_TEST_GIT_REF }} + path: system-test + - uses: actions/checkout@v3 + name: checkout soroban-tools + with: + path: soroban-tools + - if: ${{ env.SYSTEM_TEST_JS_STELLAR_SDK_GH_REPO != ''}} + name: prepare local js-stellar-sdk + run: | + rm -rf $GITHUB_WORKSPACE/system-test/js-stellar-sdk; + - if: ${{ env.SYSTEM_TEST_JS_STELLAR_SDK_GH_REPO != ''}} + uses: actions/checkout@v3 + with: + repository: ${{ env.SYSTEM_TEST_JS_STELLAR_SDK_GH_REPO }} + ref: ${{ env.SYSTEM_TEST_JS_STELLAR_SDK_GH_REF }} + path: system-test/js-stellar-sdk + - uses: stellar/actions/rust-cache@main + - name: Build system test with component versions + run: | + cd $GITHUB_WORKSPACE/system-test + if [ -z "$SYSTEM_TEST_JS_STELLAR_SDK_GH_REPO" ]; then \ + JS_STELLAR_SDK_REF="$SYSTEM_TEST_JS_STELLAR_SDK_NPM_VERSION"; \ + else \ + JS_STELLAR_SDK_REF="file:/home/tester/js-stellar-sdk"; \ + fi + make \ + CORE_GIT_REF=$SYSTEM_TEST_CORE_GIT_REF \ + CORE_COMPILE_CONFIGURE_FLAGS="$SYSTEM_TEST_CORE_COMPILE_CONFIGURE_FLAGS" \ + CORE_IMAGE=$SYSTEM_TEST_CORE_IMAGE \ + SOROBAN_RPC_GIT_REF=$SYSTEM_TEST_SOROBAN_TOOLS_REF \ + SOROBAN_CLI_GIT_REF=$SYSTEM_TEST_SOROBAN_TOOLS_REF \ + RUST_TOOLCHAIN_VERSION=$SYSTEM_TEST_RUST_TOOLCHAIN_VERSION \ + RS_XDR_GIT_REF=$SYSTEM_TEST_RS_XDR_GIT_REF \ + QUICKSTART_GIT_REF=$SYSTEM_TEST_QUICKSTART_GIT_REF \ + JS_STELLAR_SDK_NPM_VERSION=$JS_STELLAR_SDK_REF \ + build + - name: Run system test scenarios + run: | + docker run --rm -t --name e2e_test stellar/system-test:dev \ + --VerboseOutput $SYSTEM_TEST_VERBOSE_OUTPUT \ + --TestFilter "${{ matrix.scenario-filter }}" \ + --SorobanExamplesGitHash $SYSTEM_TEST_SOROBAN_EXAMPLES_GIT_HASH \ + --SorobanExamplesRepoURL $SYSTEM_TEST_SOROBAN_EXAMPLES_GIT_REPO diff --git 
a/.github/workflows/full-help-docs.yml b/.github/workflows/full-help-docs.yml new file mode 100644 index 00000000..c8fbd9e0 --- /dev/null +++ b/.github/workflows/full-help-docs.yml @@ -0,0 +1,32 @@ +name: CLI Help Doc + +on: [push, pull_request] + +permissions: + contents: read + # Optional: allow read access to pull request. Use with `only-new-issues` option. + pull-requests: read + +jobs: + doc_check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: stellar/actions/rust-cache@main + - run: rustup update + - name: Generate help doc + # this looks goofy to get GITHUB_OUTPUT to work with multi-line return values; + # see https://stackoverflow.com/a/74266196/249801 + run: | + cargo md-gen + raw_diff=$(git diff docs/soroban-cli-full-docs.md) + if [ "$raw_diff" != "" ]; then echo ""; echo "Unexpected docs change:"; echo "======================="; echo ""; echo "$raw_diff"; echo ""; echo "======================="; echo ""; fi + echo diff=$raw_diff >> $GITHUB_OUTPUT + id: doc-gen + + - name: Check diff + if: steps.doc-gen.outputs.diff != '' + uses: actions/github-script@v3 + with: + script: | + core.setFailed('Expected `doc-gen` to generate no diffs, but got diff shown in previous step.\n\nUpdate the full help docs:\n\n cargo md-gen\n\nDo this automatically on every commit by installing the pre-commit hook as explained in the README.') diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml new file mode 100644 index 00000000..1781a6ef --- /dev/null +++ b/.github/workflows/golangci-lint.yml @@ -0,0 +1,41 @@ +name: Linters +on: + push: + branches: + - main + pull_request: + +permissions: + contents: read + # Optional: allow read access to pull request. Use with `only-new-issues` option. + pull-requests: read + +jobs: + golangci: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # version v3.0.2 + with: + fetch-depth: 0 # required for new-from-rev option in .golangci.yml + - name: Setup GO + uses: actions/setup-go@268d8c0ca0432bb2cf416faae41297df9d262d7f # version v3.3.0 + with: + go-version: '>=1.21.0' + + - name: Build libpreflight + run: | + rustup update + make build-libpreflight + + - name: Run golangci-lint + uses: golangci/golangci-lint-action@537aa1903e5d359d0b27dbc19ddd22c5087f3fbc # version v3.2.0 + with: + version: v1.51.1 # this is the golangci-lint version + args: --issues-exit-code=0 # exit without errors for now - won't fail the build + github-token: ${{ secrets.GITHUB_TOKEN }} + only-new-issues: true + + + + diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..b51a4852 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,74 @@ +name: Publish + +on: + release: + types: [published] + +defaults: + run: + shell: bash + +jobs: + + publish: + uses: stellar/actions/.github/workflows/rust-publish.yml@main + secrets: + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} + + upload: + needs: publish + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-20.04 # Use 20.04 to get an older version of glibc for increased compat + target: x86_64-unknown-linux-gnu + - os: ubuntu-20.04 # Use 20.04 to get an older version of glibc for increased compat + target: aarch64-unknown-linux-gnu + - os: macos-latest + target: x86_64-apple-darwin + - os: macos-latest + target: aarch64-apple-darwin + - os: windows-latest + target: x86_64-pc-windows-msvc + ext: .exe + runs-on: ${{ matrix.os }} + env: + 
VERSION: '${{ github.event.release.name }}' + NAME: 'soroban-cli-${{ github.event.release.name }}-${{ matrix.target }}' + steps: + - uses: actions/checkout@v3 + - run: rustup update + - run: rustup target add ${{ matrix.target }} + - if: matrix.target == 'aarch64-unknown-linux-gnu' + run: sudo apt-get update && sudo apt-get -y install gcc-aarch64-linux-gnu g++-aarch64-linux-gnu + - name: Package + run: cargo package --no-verify + - name: Build + env: + CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc + run: | + cd target/package + tar xvfz soroban-cli-$VERSION.crate + cd soroban-cli-$VERSION + cargo build --target-dir=../.. --features opt --release --target ${{ matrix.target }} + - name: Compress + run: | + cd target/${{ matrix.target }}/release + tar czvf $NAME.tar.gz soroban${{ matrix.ext }} + - uses: actions/upload-artifact@v3 + with: + name: ${{ env.NAME }} + path: 'target/${{ matrix.target }}/release/${{ env.NAME }}.tar.gz' + - name: Upload + uses: actions/github-script@v6 + with: + script: | + const fs = require('fs'); + await github.rest.repos.uploadReleaseAsset({ + owner: context.repo.owner, + repo: context.repo.repo, + release_id: ${{ github.event.release.id }}, + name: '${{ env.NAME }}.tar.gz', + data: fs.readFileSync('target/${{ matrix.target }}/release/${{ env.NAME }}.tar.gz'), + }); diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml new file mode 100644 index 00000000..9028df78 --- /dev/null +++ b/.github/workflows/rust.yml @@ -0,0 +1,124 @@ +name: Rust + +on: + push: + branches: [main, release/**] + pull_request: + +defaults: + run: + shell: bash + +jobs: + + complete: + if: always() + needs: [fmt, rust-analyzer-compat, build-and-test, publish-dry-run] + runs-on: ubuntu-latest + steps: + - if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') + run: exit 1 + + fmt: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - run: rustup update + - run: cargo fmt --all --check + + rust-analyzer-compat: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - run: rustup update + - run: rustup +nightly component add rust-analyzer + - name: Check if rust-analyzer encounters any errors parsing project + run: rustup run nightly rust-analyzer analysis-stats . 2>&1 | (! grep '^\[ERROR') + + build-and-test: + strategy: + fail-fast: false + matrix: + rust: [msrv, latest] + include: + - os: ubuntu-latest + target: x86_64-unknown-linux-gnu + - os: ubuntu-latest + target: aarch64-unknown-linux-gnu + - os: macos-latest + target: x86_64-apple-darwin + - os: macos-latest + target: aarch64-apple-darwin + - os: windows-latest-8-cores + target: x86_64-pc-windows-msvc + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v3 + - uses: stellar/actions/rust-cache@main + - name: Use the minimum supported Rust version + if: matrix.rust == 'msrv' + run: | + msrv="$(cargo metadata --format-version 1 --no-deps | jq -r '.packages | map(.rust_version) | map(values) | min')" + rustup override set $msrv + rustup component add clippy --toolchain $msrv + - name: Error on warnings and clippy checks + # Only error on warnings and checks for the msrv, because new versions of + # Rust will frequently add new warnings and checks. 
+        if: matrix.rust == 'msrv'
+        run: echo RUSTFLAGS='-Dwarnings -Dclippy::all -Dclippy::pedantic' >> $GITHUB_ENV
+      - run: rustup update
+      - run: cargo version
+      - run: rustup target add ${{ matrix.target }}
+      - run: rustup target add wasm32-unknown-unknown
+      - if: matrix.target == 'aarch64-unknown-linux-gnu'
+        run: sudo apt-get update && sudo apt-get -y install gcc-aarch64-linux-gnu g++-aarch64-linux-gnu
+      - run: cargo clippy --all-targets --target ${{ matrix.target }}
+      - run: make build-test
+      - if: startsWith(matrix.target, 'x86_64')
+        # specify directories explicitly to avoid building the preflight library (otherwise it will fail with missing symbols)
+        run: |
+          for I in cmd/soroban-cli cmd/crates/* cmd/crates/soroban-test/tests/fixtures/test-wasms/hello_world ; do
+            cargo test --target ${{ matrix.target }} --manifest-path $I/Cargo.toml
+          done
+
+  publish-dry-run:
+    if: github.event_name == 'push' || startsWith(github.head_ref, 'release/')
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - os: ubuntu-latest
+            target: x86_64-unknown-linux-gnu
+            cargo-hack-feature-options: --feature-powerset
+          - os: ubuntu-latest
+            target: aarch64-unknown-linux-gnu
+            cargo-hack-feature-options: --feature-powerset
+          - os: macos-latest
+            target: x86_64-apple-darwin
+            cargo-hack-feature-options: --feature-powerset
+          - os: macos-latest
+            target: aarch64-apple-darwin
+            cargo-hack-feature-options: --feature-powerset
+          # Windows builds notes:
+          #
+          # The different features that need testing are split over unique
+          # isolated builds for Windows, because there's a bug in Cargo [1] that
+          # causes builds of wasm-opt [2] to fail when run one after the other and
+          # attempting to clean up artifacts in between. The bug has been fixed,
+          # but will not make it into a stable release of Cargo until ~August
+          # 2023.
+          #
+          # [1]: https://github.com/rust-lang/cargo/pull/11442
+          # [2]: https://github.com/brson/wasm-opt-rs/issues/116
+          - os: windows-latest-8-cores
+            target: x86_64-pc-windows-msvc
+            cargo-hack-feature-options: ''
+          - os: windows-latest-8-cores
+            target: x86_64-pc-windows-msvc
+            cargo-hack-feature-options: --features opt --ignore-unknown-features
+    uses: stellar/actions/.github/workflows/rust-publish-dry-run-v2.yml@main
+    with:
+      crates: soroban-spec-tools soroban-spec-json soroban-spec-typescript soroban-test soroban-cli
+      runs-on: ${{ matrix.os }}
+      target: ${{ matrix.target }}
+      cargo-hack-feature-options: ${{ matrix.cargo-hack-feature-options }}
diff --git a/.github/workflows/soroban-rpc.yml b/.github/workflows/soroban-rpc.yml
new file mode 100644
index 00000000..459b391e
--- /dev/null
+++ b/.github/workflows/soroban-rpc.yml
@@ -0,0 +1,129 @@
+name: Soroban RPC
+
+defaults:
+  run:
+    shell: bash
+
+on:
+  push:
+    branches: [main, release/**]
+  pull_request:
+
+jobs:
+  test:
+    name: Unit tests
+    strategy:
+      matrix:
+        os: [ubuntu-20.04, ubuntu-22.04]
+        go: [1.21]
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          # For pull requests, build and test the PR head, not a merge of the PR with the destination.
+          ref: ${{ github.event.pull_request.head.sha || github.ref }}
+          # We need the full history for git-restore-mtime to know what modification dates to use.
+          # Otherwise, the Go test cache will fail (due to the modification time of fixtures changing).
+          fetch-depth: "0"
+      - uses: ./.github/actions/setup-go
+        with:
+          go-version: ${{ matrix.go }}
+      - name: Build soroban contract fixtures
+        run: |
+          rustup update
+          rustup target add wasm32-unknown-unknown
+          make build-test-wasms
+      - run: make build-libpreflight
+      - run: go test -race -cover -timeout 25m -v ./cmd/soroban-rpc/...
+
+  build:
+    name: Build
+    strategy:
+      matrix:
+        include:
+          - os: ubuntu-latest
+            rust_target: x86_64-unknown-linux-gnu
+            go_arch: amd64
+          - os: ubuntu-latest
+            rust_target: aarch64-unknown-linux-gnu
+            go_arch: arm64
+          - os: macos-latest
+            rust_target: x86_64-apple-darwin
+            go_arch: amd64
+          - os: macos-latest
+            rust_target: aarch64-apple-darwin
+            go_arch: arm64
+          - os: windows-latest
+            rust_target: x86_64-pc-windows-gnu
+            go_arch: amd64
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v3
+      # we cannot use our own ./.github/actions/setup-go action
+      # because it uses apt-get and some OSs (e.g. windows) don't have it
+      - uses: actions/setup-go@v3
+        with:
+          go-version: 1.21
+
+      - run: |
+          rustup target add ${{ matrix.rust_target }}
+          rustup update
+
+      # On windows, make sure we have the same compiler (linker) used by rust.
+      # This is important since the symbol names won't match otherwise.
+      - if: matrix.os == 'windows-latest'
+        name: Install the same mingw gcc compiler used by rust
+        run: |
+          C:/msys64/usr/bin/pacman.exe -S mingw-w64-x86_64-gcc --noconfirm
+          echo "CC=C:/msys64/mingw64/bin/gcc.exe" >> $GITHUB_ENV
+          echo "C:/msys64/mingw64/bin" >> $GITHUB_PATH
+
+      # Use cross-compiler for linux aarch64
+      - if: matrix.rust_target == 'aarch64-unknown-linux-gnu'
+        name: Install aarch64 cross-compilation toolchain
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y gcc-10-aarch64-linux-gnu
+          echo 'CC=aarch64-linux-gnu-gcc-10' >> $GITHUB_ENV
+
+      - name: Build libpreflight
+        run: make build-libpreflight
+        env:
+          CARGO_BUILD_TARGET: ${{ matrix.rust_target }}
+
+      - name: Build Soroban RPC reproducible build
+        run: |
+          go build -trimpath -buildvcs=false ./cmd/soroban-rpc
+          ls -lh soroban-rpc
+          file soroban-rpc
+        env:
+          CGO_ENABLED: 1
+          GOARCH: ${{ matrix.go_arch }}
+
+  integration:
+    name: Integration tests
+    continue-on-error: true
+    strategy:
+      matrix:
+        os: [ubuntu-20.04, ubuntu-22.04]
+        go: [1.21]
+        test: ['.*CLI.*', '^Test(([^C])|(C[^L])|(CL[^I])).*$']
+    runs-on: ${{ matrix.os }}
+    env:
+      SOROBAN_RPC_INTEGRATION_TESTS_ENABLED: true
+      SOROBAN_RPC_INTEGRATION_TESTS_CAPTIVE_CORE_BIN: /usr/bin/stellar-core
+      PROTOCOL_20_CORE_DEBIAN_PKG_VERSION: 20.1.0-1656.114b833e7.focal
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          # For pull requests, build and test the PR head, not a merge of the PR with the destination.
+          ref: ${{ github.event.pull_request.head.sha || github.ref }}
+          # We need the full history for git-restore-mtime to know what modification dates to use.
+          # Otherwise, the Go test cache will fail (due to the modification time of fixtures changing).
+          fetch-depth: "0"
+      - uses: ./.github/actions/setup-integration-tests
+        with:
+          go-version: ${{ matrix.go }}
+      - name: Run Soroban RPC Integration Tests
+        run: |
+          go test -race -run '${{ matrix.test }}' -timeout 60m -v ./cmd/soroban-rpc/internal/test/...
diff --git a/.github/workflows/update-completed-sprint-on-issue-closed.yml b/.github/workflows/update-completed-sprint-on-issue-closed.yml new file mode 100644 index 00000000..8ca1cf26 --- /dev/null +++ b/.github/workflows/update-completed-sprint-on-issue-closed.yml @@ -0,0 +1,25 @@ +name: Update CompletedSprint on Issue Closed + +on: + issues: + types: [closed] + pull_request: + types: [closed] + +jobs: + update-completed-sprint: + runs-on: ubuntu-latest + steps: + - name: Generate token + id: generate_token + uses: tibdex/github-app-token@v1 + with: + app_id: ${{ secrets.GH_PROJECT_MANAGEMENT_APP_ID }} + private_key: ${{ secrets.GH_PROJECT_MANAGEMENT_APP_PEM }} + - name: Update CompletedSprint on Issue Closed + id: update_completedsprint_on_issue_closed + uses: stellar/actions/update-completed-sprint-on-issue-closed@main + with: + project_name: "Platform Scrum" + field_name: "CompletedSprint" + project_token: ${{ steps.generate_token.outputs.token }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..129d79f5 --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +target/ +captive-core/ +.soroban/ +!test.toml +*.sqlite diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 00000000..28fbe1f9 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,122 @@ +linters-settings: + depguard: + dupl: + threshold: 100 + funlen: + lines: 100 + statements: 50 + goconst: + min-len: 2 + min-occurrences: 3 + gocritic: + enabled-tags: + - diagnostic + - experimental + - opinionated + - performance + - style + disabled-checks: + - dupImport # https://github.com/go-critic/go-critic/issues/845 + - ifElseChain + - octalLiteral + - whyNoLint + gocyclo: + min-complexity: 15 + goimports: + local-prefixes: github.com/golangci/golangci-lint + gomnd: + # don't include the "operation" and "assign" + checks: + - argument + - case + - condition + - return + ignored-numbers: + - '0' + - '1' + - '2' + - '3' + ignored-functions: + - strings.SplitN + + govet: + check-shadowing: true + settings: + printf: + funcs: + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + lll: + line-length: 140 + misspell: + locale: US + nolintlint: + allow-unused: false # report any unused nolint directives + require-explanation: false # don't require an explanation for nolint directives + require-specific: false # don't require nolint directives to be specific about which linter is being skipped + +linters: + disable-all: true + enable: + - bodyclose + - depguard + - dogsled + - dupl + - errcheck + - exportloopref + #- funlen + - gochecknoinits + - goconst + #- gocritic + #- gocyclo + - gofmt + - goimports + #- gomnd + - goprintffuncname + - gosec + - gosimple + - govet + - ineffassign + #- lll + - misspell + - nakedret + - noctx + - nolintlint + - staticcheck + - stylecheck + - typecheck + - unconvert + - unparam + - unused + - whitespace + + # don't enable: + # - asciicheck + # - scopelint + # - gochecknoglobals + # - gocognit + # - godot + # - godox + # - goerr113 + # - interfacer + # - maligned + # - nestif + # - prealloc + # - testpackage + # - revive + # - wsl + +issues: + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + - path: _test\.go + linters: + - govet + +run: + timeout: 5m + skip-dirs: + - docs + - vendor diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 
00000000..451235fa --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,3738 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +dependencies = [ + "memchr", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" + +[[package]] +name = "anstyle-parse" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "anyhow" +version = "1.0.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" + +[[package]] +name = "arbitrary" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +dependencies = [ + "derive_arbitrary", +] + +[[package]] +name = "assert_cmd" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88903cb14723e4d4003335bb7f8a14f27691649105346a0f0957466c096adfe6" +dependencies = [ + "anstyle", + "bstr", + "doc-comment", + "predicates 3.0.4", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + +[[package]] +name = "assert_fs" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f070617a68e5c2ed5d06ee8dd620ee18fb72b99f6c094bed34cf8ab07c875b48" +dependencies = 
[ + "anstyle", + "doc-comment", + "globwalk", + "predicates 3.0.4", + "predicates-core", + "predicates-tree", + "tempfile", +] + +[[package]] +name = "async-trait" +version = "0.1.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base32" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23ce669cd6c8588f79e15cf450314f9638f967fc5770ff1c7c1deb0925ea7cfa" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" +dependencies = [ + "serde", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bstr" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c" +dependencies = [ + "memchr", + "regex-automata 0.4.3", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "bytes-lit" +version = "0.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0adabf37211a5276e46335feabcbb1530c95eb3fdf85f324c7db942770aa025d" +dependencies = [ + "num-bigint", + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "camino" +version = "1.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59e92b5a388f549b863a7bea62612c09f24c8393560709a54558a9abdfb3b9c" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e34637b3140142bdf929fb439e8aa4ebad7651ebf7b1080b3930aa16ac1459ff" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a" +dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "jobserver", + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-targets 0.48.5", +] + +[[package]] +name = "clap" +version = "4.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap-markdown" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "325f50228f76921784b6d9f2d62de6778d834483248eefecd27279174797e579" +dependencies = [ + "clap", +] + +[[package]] +name = "clap_builder" +version = "4.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_complete" +version = "4.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bffe91f06a11b4b9420f62103854e90867812cd5d01557f853c5ee8e791b12ae" +dependencies = [ + "clap", +] + +[[package]] +name = "clap_derive" +version = "4.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "clap_lex" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "const-oid" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpufeatures" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" +dependencies = [ + "libc", +] + +[[package]] +name = "crate-git-revision" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f998aef136a4e7833b0e4f0fc0939a59c40140b28e0ffbf524ad84fb2cc568c8" +dependencies = [ + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "crate-git-revision" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c521bf1f43d31ed2f73441775ed31935d77901cb3451e44b38a1c1612fcbaf98" +dependencies = [ + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c3242926edf34aec4ac3a77108ad4854bffaa2e4ddc1824124ce59231302d5" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fca89a0e215bab21874660c67903c5f143333cab1da83d041c7ded6053774751" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d2fe95351b870527a5d09bf563ed3c97c0cffb87cf1c78a591bf48bb218d9aa" +dependencies = [ + "autocfg", + "cfg-if", + "crossbeam-utils", + "memoffset", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d96137f14f244c37f989d9fff8f95e6c18b918e71f36638f8c49112e4c78f" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" 
+dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "crypto-mac" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58bcd97a54c7ca5ce2f6eb16f6bede5b0ab5f0055fedc17d2f0b4466e21671ca" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = "csv" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" +dependencies = [ + "memchr", +] + +[[package]] +name = "ctor" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30d2b3721e861707777e3195b0158f950ae6dc4a27e4d02ff9f67e3eb3de199e" +dependencies = [ + "quote", + "syn 2.0.39", +] + +[[package]] +name = "curve25519-dalek" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" +dependencies = [ + "byteorder", + "digest 0.9.0", + "rand_core 0.5.1", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "platforms", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "cxx" +version = "1.0.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7129e341034ecb940c9072817cd9007974ea696844fc4dd582dc1653a7fbe2e8" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2a24f3f5f8eed71936f21e570436f024f5c2e25628f7496aa7ccd03b90109d5" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2", + "quote", + "scratch", + "syn 2.0.39", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06fdd177fc61050d63f67f5bd6351fac6ab5526694ea8e359cd9cd3b75857f44" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.110" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "587663dd5fb3d10932c8aecfe7c844db1bcf0aee93eeab08fac13dc1212c2e7f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "darling" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" 
+dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.39", +] + +[[package]] +name = "darling_macro" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "der" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derive_arbitrary" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer 0.10.4", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "downcast-rs" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650" + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest 0.10.7", + "elliptic-curve", + "rfc6979", + "signature 2.2.0", + "spki", +] + +[[package]] +name = "ed25519" +version = "1.5.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +dependencies = [ + "signature 1.6.4", +] + +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8", + "signature 2.2.0", +] + +[[package]] +name = "ed25519-dalek" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" +dependencies = [ + "curve25519-dalek 3.2.0", + "ed25519 1.5.3", + "sha2 0.9.9", + "zeroize", +] + +[[package]] +name = "ed25519-dalek" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" +dependencies = [ + "curve25519-dalek 4.1.1", + "ed25519 2.2.3", + "rand_core 0.6.4", + "serde", + "sha2 0.10.8", + "zeroize", +] + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" + +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest 0.10.7", + "ff", + "generic-array", + "group", + "pkcs8", + "rand_core 0.6.4", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "escape-bytes" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bfcf67fea2815c2fc3b90873fae90957be12ff417335dfadc7f52927feb03b2" + +[[package]] +name = "ethnum" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b90ca2580b73ab6a1f724b76ca11ab632df820fd6040c336200d2c1df7b3c82c" + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7" + +[[package]] +name = "float-cmp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" +dependencies = [ + "num-traits", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "futures-channel" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" + +[[package]] +name = "futures-sink" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817" + +[[package]] +name = "futures-task" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2" + +[[package]] +name = "futures-util" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104" +dependencies = [ + "futures-core", + "futures-sink", + "futures-task", + "pin-project-lite", + "pin-utils", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "getrandom" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "globset" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +dependencies = [ + "aho-corasick", + "bstr", + "log", + "regex-automata 0.4.3", + "regex-syntax 0.8.2", +] + +[[package]] +name = "globwalk" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"93e3af942408868f6934a7b85134a3230832b9977cf66125df2f9edcfce4ddcc" +dependencies = [ + "bitflags 1.3.2", + "ignore", + "walkdir", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 2.1.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] + +[[package]] +name = "hex-literal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" + +[[package]] +name = "hmac" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "deae6d9dbb35ec2c502d62b8f7b1c000a0822c3b0794ba36b3149c0a1c840dff" +dependencies = [ + "crypto-mac", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "home" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "http" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.4.10", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http", + "hyper", + "log", + "rustls", + "rustls-native-certs", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "ignore" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "747ad1b4ae841a78e8aba0d63adbfbeaea26b517b63705d47856b73015d27060" +dependencies = [ + "crossbeam-deque", + "globset", + "log", + "memchr", + "regex-automata 0.4.3", + "same-file", + "walkdir", + "winapi-util", +] + +[[package]] +name = "include_dir" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e" +dependencies = [ + "glob", + "include_dir_macros", +] + +[[package]] +name = "include_dir_macros" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +dependencies = [ + 
"equivalent", + "hashbrown 0.14.3", + "serde", +] + +[[package]] +name = "indexmap-nostd" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e04e2fd2b8188ea827b32ef11de88377086d690286ab35747ef7f9bf3ccb590" + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" + +[[package]] +name = "jobserver" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da2327ba8df2fdbd5e897e2b5ed25ce7f299d345b9736b6828814c3dbd1fd47b" +dependencies = [ + "anyhow", + "async-trait", + "beef", + "futures-util", + "hyper", + "jsonrpsee-types", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f80c17f62c7653ce767e3d7288b793dfec920f97067ceb189ebdd3570f2bc20" +dependencies = [ + "async-trait", + "hyper", + "hyper-rustls", + "jsonrpsee-core", + "jsonrpsee-types", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower", + "tracing", + "url", +] + +[[package]] +name = "jsonrpsee-types" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be0be325642e850ed0bdff426674d2e66b2b7117c9be23a7caef68a2902b7d9" +dependencies = [ + "anyhow", + "beef", + "serde", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = "k256" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cadb76004ed8e97623117f3df85b17aaa6626ab0b0831e6573f104df16cd1bcc" +dependencies = [ + "cfg-if", + "ecdsa", + "elliptic-curve", + "once_cell", + "sha2 0.10.8", + "signature 2.2.0", +] + +[[package]] +name = "keccak" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f6d5ed8676d904364de097082f4e7d240b571b67989ced0240f08b7f966f940" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.151" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" + +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libredox" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +dependencies = [ + "bitflags 2.4.1", + "libc", + "redox_syscall", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d240c6f7e1ba3a28b0249f774e6a9dd0175054b52dfbb61b16eb8505c3785c9" +dependencies = [ + "cc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" + +[[package]] +name = "lock_api" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "memchr" +version = "2.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" + +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "normalize-line-endings" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-derive" +version = "0.4.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfb77679af88f8b125209d354a202862602672222e7f2313fdd6dc349bad4712" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.32.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + +[[package]] +name = "openssl" +version = "0.10.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b8419dc8cc6d866deb801274bba2e6f8f6108c1bb7fcc10ee5ab864931dbb45" +dependencies = [ + "bitflags 2.4.1", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-src" +version = "300.2.1+3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fe476c29791a5ca0d1273c697e96085bbabbbea2ef7afd5617e78a4b40332d3" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.97" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3eaad34cdd97d81de97964fc7f29e2d104f483840d906ef56daa1912338460b" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.48.5", +] + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + +[[package]] +name = "pathdiff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" + +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" + +[[package]] +name = "platforms" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "predicates" +version = "2.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59230a63c37f3e18569bdb90e4a89cbf5bf8b06fea0b84e65ea10cc4df47addd" +dependencies = [ + "difflib", + "float-cmp", + "itertools 0.10.5", + "normalize-line-endings", + "predicates-core", + "regex", +] + +[[package]] +name = "predicates" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dfc28575c2e3f19cb3c73b93af36460ae898d426eba6fc15b9bd2a5220758a0" +dependencies = [ + "anstyle", + "difflib", + 
"itertools 0.11.0", + "predicates-core", +] + +[[package]] +name = "predicates-core" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" + +[[package]] +name = "predicates-tree" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = "preflight" +version = "20.2.0" +dependencies = [ + "base64 0.21.5", + "libc", + "sha2 0.10.8", + "soroban-env-host", + "soroban-simulation", +] + +[[package]] +name = "pretty_assertions" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +dependencies = [ + "diff", + "yansi", +] + +[[package]] +name = "prettyplease" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" +dependencies = [ + "proc-macro2", + "syn 2.0.39", +] + +[[package]] +name = "proc-macro2" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_users" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.3", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac 0.12.1", + "subtle", +] + +[[package]] +name = "ring" +version = "0.17.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" +dependencies = [ + "cc", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys 0.48.0", +] + +[[package]] +name = "rpassword" +version = "7.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f" +dependencies = [ + "libc", + "rtoolbox", + "windows-sys 0.48.0", +] + +[[package]] +name = "rtoolbox" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.38.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +dependencies = [ + "bitflags 2.4.1", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.21.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.5", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + +[[package]] +name = "ryu" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "scratch" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3cf7c11c38cb994f3d40e8a8cde3bbd1f72a435e4c49e85d6553d8312306152" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" +dependencies = [ + "serde", +] + +[[package]] +name = "sep5" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afe34ccbd1fb6fa0b2fc7cccb037bd3d3f1e484c3befe1b713d7611884f336a" +dependencies = [ + "slip10", + "stellar-strkey 0.0.7", + "thiserror", + "tiny-bip39", +] + +[[package]] +name = "serde" +version = "1.0.192" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-aux" 
+version = "4.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "184eba62ebddb71658697c8b08822edee89970bf318c5362189f0de27f85b498" +dependencies = [ + "chrono", + "serde", + "serde_json", +] + +[[package]] +name = "serde_derive" +version = "1.0.192" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "serde_json" +version = "1.0.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" +dependencies = [ + "base64 0.21.5", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.1.0", + "serde", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "sha2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest 0.10.7", + "keccak", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7cee0529a6d40f580e7a5e6c495c8fbfe21b7b52795ed4bb5e62cdf92bc6380" + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest 0.10.7", + "rand_core 0.6.4", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ 
+ "autocfg", +] + +[[package]] +name = "slip10" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28724a6e6f70b0cb115c580891483da6f3aa99e6a353598303a57f89d23aa6bc" +dependencies = [ + "ed25519-dalek 1.0.1", + "hmac 0.9.0", + "sha2 0.9.9", +] + +[[package]] +name = "smallvec" +version = "1.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" + +[[package]] +name = "socket2" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "socket2" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "soroban-builtin-sdk-macros" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-env?rev=36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4#36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4" +dependencies = [ + "itertools 0.11.0", + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "soroban-cli" +version = "20.2.0" +dependencies = [ + "assert_cmd", + "assert_fs", + "base64 0.21.5", + "cargo_metadata", + "chrono", + "clap", + "clap-markdown", + "clap_complete", + "crate-git-revision 0.0.4", + "csv", + "dirs", + "dotenvy", + "ed25519-dalek 2.0.0", + "ethnum", + "heck", + "hex", + "http", + "hyper", + "hyper-tls", + "itertools 0.10.5", + "jsonrpsee-core", + "jsonrpsee-http-client", + "num-bigint", + "openssl", + "pathdiff", + "predicates 2.1.5", + "rand", + "regex", + "rpassword", + "sep5", + "serde", + "serde-aux", + "serde_derive", + "serde_json", + "sha2 0.10.8", + "shlex", + "soroban-env-host", + "soroban-ledger-snapshot", + "soroban-sdk", + "soroban-spec", + "soroban-spec-json", + "soroban-spec-rust", + "soroban-spec-tools", + "soroban-spec-typescript", + "stellar-strkey 0.0.7", + "stellar-xdr", + "strsim", + "termcolor", + "termcolor_output", + "thiserror", + "tokio", + "toml", + "tracing", + "tracing-appender", + "tracing-subscriber", + "wasm-opt", + "wasmparser 0.90.0", + "which", +] + +[[package]] +name = "soroban-env-common" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-env?rev=36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4#36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4" +dependencies = [ + "arbitrary", + "crate-git-revision 0.0.6", + "ethnum", + "num-derive", + "num-traits", + "serde", + "soroban-env-macros", + "soroban-wasmi", + "static_assertions", + "stellar-xdr", +] + +[[package]] +name = "soroban-env-guest" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-env?rev=36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4#36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4" +dependencies = [ + "soroban-env-common", + "static_assertions", +] + +[[package]] +name = "soroban-env-host" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-env?rev=36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4#36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4" +dependencies = [ + "backtrace", + "curve25519-dalek 4.1.1", + "ed25519-dalek 2.0.0", + "getrandom", + "hex-literal", + "hmac 0.12.1", + "k256", + "num-derive", + "num-integer", + "num-traits", + "rand", + "rand_chacha", + "sha2 0.10.8", + "sha3", + "soroban-builtin-sdk-macros", + "soroban-env-common", + "soroban-wasmi", + 
"static_assertions", + "stellar-strkey 0.0.8", +] + +[[package]] +name = "soroban-env-macros" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-env?rev=36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4#36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4" +dependencies = [ + "itertools 0.11.0", + "proc-macro2", + "quote", + "serde", + "serde_json", + "stellar-xdr", + "syn 2.0.39", +] + +[[package]] +name = "soroban-hello" +version = "20.2.0" + +[[package]] +name = "soroban-ledger-snapshot" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-sdk?rev=e6c2c900ab82b5f6eec48f69cb2cb519e19819cb#e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" +dependencies = [ + "serde", + "serde_json", + "serde_with", + "soroban-env-common", + "soroban-env-host", + "thiserror", +] + +[[package]] +name = "soroban-sdk" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-sdk?rev=e6c2c900ab82b5f6eec48f69cb2cb519e19819cb#e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" +dependencies = [ + "arbitrary", + "bytes-lit", + "ctor", + "ed25519-dalek 2.0.0", + "rand", + "serde", + "serde_json", + "soroban-env-guest", + "soroban-env-host", + "soroban-ledger-snapshot", + "soroban-sdk-macros", + "stellar-strkey 0.0.8", +] + +[[package]] +name = "soroban-sdk-macros" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-sdk?rev=e6c2c900ab82b5f6eec48f69cb2cb519e19819cb#e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" +dependencies = [ + "crate-git-revision 0.0.6", + "darling", + "itertools 0.11.0", + "proc-macro2", + "quote", + "rustc_version", + "sha2 0.10.8", + "soroban-env-common", + "soroban-spec", + "soroban-spec-rust", + "stellar-xdr", + "syn 2.0.39", +] + +[[package]] +name = "soroban-simulation" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-env?rev=36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4#36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4" +dependencies = [ + "anyhow", + "rand", + "soroban-env-host", + "static_assertions", + "thiserror", +] + +[[package]] +name = "soroban-spec" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-sdk?rev=e6c2c900ab82b5f6eec48f69cb2cb519e19819cb#e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" +dependencies = [ + "base64 0.13.1", + "stellar-xdr", + "thiserror", + "wasmparser 0.88.0", +] + +[[package]] +name = "soroban-spec-json" +version = "20.2.0" +dependencies = [ + "pretty_assertions", + "serde", + "serde_derive", + "serde_json", + "sha2 0.9.9", + "soroban-spec", + "stellar-xdr", + "thiserror", +] + +[[package]] +name = "soroban-spec-rust" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-sdk?rev=e6c2c900ab82b5f6eec48f69cb2cb519e19819cb#e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" +dependencies = [ + "prettyplease", + "proc-macro2", + "quote", + "sha2 0.10.8", + "soroban-spec", + "stellar-xdr", + "syn 2.0.39", + "thiserror", +] + +[[package]] +name = "soroban-spec-tools" +version = "20.2.0" +dependencies = [ + "base64 0.21.5", + "ethnum", + "hex", + "itertools 0.10.5", + "serde_json", + "soroban-spec", + "stellar-strkey 0.0.7", + "stellar-xdr", + "thiserror", + "tokio", + "wasmparser 0.90.0", + "which", +] + +[[package]] +name = "soroban-spec-typescript" +version = "20.2.0" +dependencies = [ + "base64 0.21.5", + "heck", + "include_dir", + "itertools 0.10.5", + "pretty_assertions", + "prettyplease", + "serde", + "serde_derive", + "serde_json", + "sha2 0.9.9", + "soroban-spec", + "stellar-xdr", + "temp-dir", + "thiserror", + "walkdir", +] + +[[package]] +name = "soroban-test" +version = "20.2.0" 
+dependencies = [ + "assert_cmd", + "assert_fs", + "fs_extra", + "predicates 2.1.5", + "sep5", + "serde_json", + "sha2 0.10.8", + "soroban-cli", + "soroban-env-host", + "soroban-ledger-snapshot", + "soroban-sdk", + "soroban-spec", + "soroban-spec-tools", + "stellar-strkey 0.0.7", + "thiserror", + "tokio", + "which", +] + +[[package]] +name = "soroban-token-sdk" +version = "20.1.0" +source = "git+https://github.com/stellar/rs-soroban-sdk?rev=e6c2c900ab82b5f6eec48f69cb2cb519e19819cb#e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" +dependencies = [ + "soroban-sdk", +] + +[[package]] +name = "soroban-wasmi" +version = "0.31.1-soroban.20.0.0" +source = "git+https://github.com/stellar/wasmi?rev=ab29800224d85ee64d4ac127bac84cdbb0276721#ab29800224d85ee64d4ac127bac84cdbb0276721" +dependencies = [ + "smallvec", + "spin", + "wasmi_arena", + "wasmi_core", + "wasmparser-nostd", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "stellar-strkey" +version = "0.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0689070126ca7f2effc2c5726584446db52190f0cef043c02eb4040a711c11" +dependencies = [ + "base32", + "thiserror", +] + +[[package]] +name = "stellar-strkey" +version = "0.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12d2bf45e114117ea91d820a846fd1afbe3ba7d717988fee094ce8227a3bf8bd" +dependencies = [ + "base32", + "crate-git-revision 0.0.6", + "thiserror", +] + +[[package]] +name = "stellar-xdr" +version = "20.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9f00a85bd9b1617d4cb7e741733889c9940e6bdeca360db81752b0ef04fe3a5" +dependencies = [ + "arbitrary", + "base64 0.13.1", + "clap", + "crate-git-revision 0.0.6", + "escape-bytes", + "hex", + "serde", + "serde_json", + "serde_with", + "stellar-strkey 0.0.8", + "thiserror", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 1.0.109", +] + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ 
+ "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "temp-dir" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af547b166dd1ea4b472165569fc456cfb6818116f854690b0ff205e636523dab" + +[[package]] +name = "tempfile" +version = "3.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +dependencies = [ + "cfg-if", + "fastrand", + "redox_syscall", + "rustix", + "windows-sys 0.48.0", +] + +[[package]] +name = "termcolor" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "termcolor_output" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0363afbf20990ea53a69c03b71800480aaf90e8f49f6fd5385ecc302062895ff" +dependencies = [ + "termcolor", + "termcolor_output_impl", +] + +[[package]] +name = "termcolor_output_impl" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f34dde0bb841eb3762b42bdff8db11bbdbc0a3bd7b32012955f5ce1d081f86c1" + +[[package]] +name = "termtree" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" + +[[package]] +name = "test_custom_types" +version = "20.2.0" +dependencies = [ + "soroban-sdk", +] + +[[package]] +name = "test_hello_world" +version = "20.2.0" +dependencies = [ + "soroban-sdk", +] + +[[package]] +name = "test_swap" +version = "20.2.0" +dependencies = [ + "soroban-sdk", +] + +[[package]] +name = "test_token" +version = "20.2.0" +dependencies = [ + "soroban-sdk", + "soroban-token-sdk", +] + +[[package]] +name = "test_udt" +version = "20.2.0" +dependencies = [ + "soroban-sdk", +] + +[[package]] +name = "thiserror" +version = "1.0.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" +dependencies = [ + "deranged", + "itoa", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.15" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" +dependencies = [ + "time-core", +] + +[[package]] +name = "tiny-bip39" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62cc94d358b5a1e84a5cb9109f559aa3c4d634d2b1b4de3d0fa4adc7c78e2861" +dependencies = [ + "anyhow", + "hmac 0.12.1", + "once_cell", + "pbkdf2", + "rand", + "rustc-hash", + "sha2 0.10.8", + "thiserror", + "unicode-normalization", + "wasm-bindgen", + "zeroize", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.35.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2 0.5.5", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-appender" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +dependencies = [ + "crossbeam-channel", + "thiserror", + "time", + "tracing-subscriber", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-bidi" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-width" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + +[[package]] +name = "walkdir" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.39", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" + +[[package]] +name = "wasm-opt" +version = "0.114.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "effbef3bd1dde18acb401f73e740a6f3d4a1bc651e9773bddc512fe4d8d68f67" +dependencies = [ + "anyhow", + "libc", + "strum", + "strum_macros", + "tempfile", + "thiserror", + "wasm-opt-cxx-sys", + "wasm-opt-sys", +] + +[[package]] +name = "wasm-opt-cxx-sys" +version = "0.114.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c09e24eb283919ace2ed5733bda4842a59ce4c8de110ef5c6d98859513d17047" +dependencies = [ + "anyhow", + "cxx", + "cxx-build", + "wasm-opt-sys", +] + +[[package]] +name = "wasm-opt-sys" +version = "0.114.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f2f817bed2e8d65eb779fa37317e74de15585751f903c9118342d1970703a4" +dependencies = [ + "anyhow", + "cc", + "cxx", + "cxx-build", +] + +[[package]] +name = "wasmi_arena" +version = "0.4.0" +source = "git+https://github.com/stellar/wasmi?rev=ab29800224d85ee64d4ac127bac84cdbb0276721#ab29800224d85ee64d4ac127bac84cdbb0276721" + +[[package]] +name = "wasmi_core" +version = "0.13.0" +source = "git+https://github.com/stellar/wasmi?rev=ab29800224d85ee64d4ac127bac84cdbb0276721#ab29800224d85ee64d4ac127bac84cdbb0276721" +dependencies = [ + "downcast-rs", + "libm", + "num-traits", + "paste", +] + +[[package]] +name = "wasmparser" +version = "0.88.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb8cf7dd82407fe68161bedcd57fde15596f32ebf6e9b3bdbf3ae1da20e38e5e" +dependencies = [ + "indexmap 1.9.3", +] + +[[package]] +name = "wasmparser" +version = "0.90.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62c8d843f4423efee314dc75a1049886deba3214f7e7f9ff0e4e58b4d618581" +dependencies = [ + "indexmap 1.9.3", +] + +[[package]] +name = "wasmparser-nostd" +version = "0.100.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9157cab83003221bfd385833ab587a039f5d6fa7304854042ba358a3b09e0724" +dependencies = [ + "indexmap-nostd", +] + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "regex", + "rustix", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.51.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + 
"windows-targets 0.52.0", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "zeroize" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000..c88cd4ab --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,116 @@ +[workspace] +resolver = "2" +members = [ + "cmd/soroban-cli", + "cmd/crates/*", + "cmd/crates/soroban-test/tests/fixtures/test-wasms/*", + "cmd/crates/soroban-test/tests/fixtures/hello", + "cmd/soroban-rpc/lib/preflight", +] +default-members = ["cmd/soroban-cli", "cmd/crates/soroban-test"] +exclude = ["cmd/crates/soroban-test/tests/fixtures/hello"] + +[workspace.package] +version = "20.2.0" +rust-version = "1.74.0" + +[workspace.dependencies.soroban-env-host] +version = "=20.1.0" +git = "https://github.com/stellar/rs-soroban-env" +rev = "36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4" +# path = "../rs-soroban-env/soroban-env-host" + +[workspace.dependencies.soroban-simulation] +version = "=20.1.0" +git = "https://github.com/stellar/rs-soroban-env" +rev = "36d33cb6c986c9a8a9200b7eb04cf02e2c3f0ef4" +# path = "../rs-soroban-env/soroban-simulation" + +[workspace.dependencies.soroban-spec] +version = "=20.1.0" +git = "https://github.com/stellar/rs-soroban-sdk" +rev = "e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" +# path = "../rs-soroban-sdk/soroban-spec" + +[workspace.dependencies.soroban-spec-rust] +version = "=20.1.0" +git = "https://github.com/stellar/rs-soroban-sdk" +rev = "e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" +# path = "../rs-soroban-sdk/soroban-spec-rust" + +[workspace.dependencies.soroban-spec-json] +version = "20.2.0" +path = "./cmd/crates/soroban-spec-json" + +[workspace.dependencies.soroban-spec-typescript] +version = "20.2.0" +path = "./cmd/crates/soroban-spec-typescript" + +[workspace.dependencies.soroban-spec-tools] +version = "20.2.0" +path = "./cmd/crates/soroban-spec-tools" + +[workspace.dependencies.soroban-sdk] +version = "=20.1.0" +git = "https://github.com/stellar/rs-soroban-sdk" +rev = "e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" + +[workspace.dependencies.soroban-token-sdk] +version = "=20.1.0" +git = "https://github.com/stellar/rs-soroban-sdk" +rev = "e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" + +[workspace.dependencies.soroban-ledger-snapshot] +version = "=20.1.0" +git = "https://github.com/stellar/rs-soroban-sdk" +rev = "e6c2c900ab82b5f6eec48f69cb2cb519e19819cb" + +[workspace.dependencies.soroban-cli] +version = "20.2.0" +path = "cmd/soroban-cli" + +[workspace.dependencies.stellar-xdr] +version = "=20.0.2" +default-features = true + +[workspace.dependencies] +base64 = "0.21.2" +thiserror = "1.0.46" +sha2 = "0.10.7" 
+ethnum = "1.3.2" +hex = "0.4.3" +itertools = "0.10.0" +sep5 = "0.0.2" +serde_json = "1.0.82" +serde = "1.0.82" +stellar-strkey = "0.0.7" +tracing = "0.1.37" +tracing-subscriber = "0.3.16" +tracing-appender = "0.2.2" +which = "4.4.0" +wasmparser = "0.90.0" + + +# [patch."https://github.com/stellar/rs-soroban-env"] +# soroban-env-host = { path = "../rs-soroban-env/soroban-env-host/" } +# [patch."https://github.com/stellar/rs-soroban-sdk"] +# soroban-spec = { path = "../rs-soroban-sdk/soroban-spec/" } +# soroban-token-spec = { path = "../rs-soroban-sdk/soroban-token-spec/" } +# soroban-sdk = { path = "../rs-soroban-sdk/soroban-sdk/" } +# [patch."https://github.com/stellar/rs-stellar-xdr"] +# stellar-xdr = { path = "../rs-stellar-xdr/" } + +[profile.test-wasms] +inherits = "release" +opt-level = "z" +overflow-checks = true +debug = 0 +strip = "symbols" +debug-assertions = true +panic = "abort" +codegen-units = 1 +lto = true + +[profile.release-with-panic-unwind] +inherits = 'release' +panic = 'unwind' diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..000b2aef --- /dev/null +++ b/Makefile @@ -0,0 +1,113 @@ +all: check build test + +export RUSTFLAGS=-Dwarnings -Dclippy::all -Dclippy::pedantic + +REPOSITORY_COMMIT_HASH := "$(shell git rev-parse HEAD)" +ifeq (${REPOSITORY_COMMIT_HASH},"") + $(error failed to retrieve git head commit hash) +endif +# Want to treat empty assignment, `REPOSITORY_VERSION=` the same as absence or unset. +# By default make `?=` operator will treat empty assignment as a set value and will not use the default value. +# Both cases should fallback to default of getting the version from git tag. +ifeq ($(strip $(REPOSITORY_VERSION)),) + override REPOSITORY_VERSION = "$(shell git describe --tags --always --abbrev=0 --match='v[0-9]*.[0-9]*.[0-9]*' 2> /dev/null | sed 's/^.//')" +endif +REPOSITORY_BRANCH := "$(shell git rev-parse --abbrev-ref HEAD)" +BUILD_TIMESTAMP ?= $(shell date '+%Y-%m-%dT%H:%M:%S') +GOLDFLAGS := -X 'github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config.Version=${REPOSITORY_VERSION}' \ + -X 'github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config.CommitHash=${REPOSITORY_COMMIT_HASH}' \ + -X 'github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config.BuildTimestamp=${BUILD_TIMESTAMP}' \ + -X 'github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config.Branch=${REPOSITORY_BRANCH}' + + +# The following works around incompatibility between the rust and the go linkers - +# the rust would generate an object file with min-version of 13.0 where-as the go +# compiler would generate a binary compatible with 12.3 and up. To align these +# we instruct the go compiler to produce binaries comparible with version 13.0. +# this is a mac-only limitation. +ifeq ($(shell uname -s),Darwin) + MACOS_MIN_VER = -ldflags='-extldflags -mmacosx-version-min=13.0' +endif + +# Always specify the build target so that libpreflight.a is always put into +# an architecture subdirectory (i.e. target/$(CARGO_BUILD_TARGET)/release-with-panic-unwind ) +# Otherwise it will be much harder for Golang to find the library since +# it would need to distinguish when we are crosscompiling and when we are not +# (libpreflight.a is put at target/release-with-panic-unwind/ when not cross compiling) +CARGO_BUILD_TARGET ?= $(shell rustc -vV | sed -n 's|host: ||p') + +# update the Cargo.lock every time the Cargo.toml changes. 
+Cargo.lock: Cargo.toml + cargo update --workspace + +install_rust: Cargo.lock + cargo install --path ./cmd/soroban-cli --debug + cargo install --path ./cmd/crates/soroban-test/tests/fixtures/hello --root ./target --debug --quiet + +install: install_rust build-libpreflight + go install -ldflags="${GOLDFLAGS}" ${MACOS_MIN_VER} ./... + +build_rust: Cargo.lock + cargo build + +build_go: build-libpreflight + go build -ldflags="${GOLDFLAGS}" ${MACOS_MIN_VER} ./... + +# regenerate the example lib in `cmd/crates/soroban-spec-typsecript/fixtures/ts` +build-snapshot: typescript-bindings-fixtures + +build: build_rust build_go + +build-libpreflight: Cargo.lock + cd cmd/soroban-rpc/lib/preflight && cargo build --target $(CARGO_BUILD_TARGET) --profile release-with-panic-unwind + +build-test-wasms: Cargo.lock + cargo build --package 'test_*' --profile test-wasms --target wasm32-unknown-unknown + +build-test: build-test-wasms install_rust + +test: build-test + cargo test + +e2e-test: + cargo test --test it -- --ignored + +check: Cargo.lock + cargo clippy --all-targets + +watch: + cargo watch --clear --watch-when-idle --shell '$(MAKE)' + +fmt: + cargo fmt --all + +clean: + cargo clean + go clean ./... + +publish: + cargo workspaces publish --all --force '*' --from-git --yes + +# the build-soroban-rpc build target is an optimized build target used by +# https://github.com/stellar/pipelines/stellar-horizon/Jenkinsfile-soroban-rpc-package-builder +# as part of the package building. +build-soroban-rpc: build-libpreflight + go build -ldflags="${GOLDFLAGS}" ${MACOS_MIN_VER} -o soroban-rpc -trimpath -v ./cmd/soroban-rpc + +lint-changes: + golangci-lint run ./... --new-from-rev $$(git rev-parse HEAD) + +lint: + golangci-lint run ./... + +typescript-bindings-fixtures: build-test-wasms + cargo run -- contract bindings typescript \ + --wasm ./target/wasm32-unknown-unknown/test-wasms/test_custom_types.wasm \ + --contract-id CBYMYMSDF6FBDNCFJCRC7KMO4REYFPOH2U4N7FXI3GJO6YXNCQ43CDSK \ + --network futurenet \ + --output-dir ./cmd/crates/soroban-spec-typescript/fixtures/test_custom_types \ + --overwrite + + +# PHONY lists all the targets that aren't file names, so that make would skip the timestamp based check. +.PHONY: publish clean fmt watch check e2e-test test build-test-wasms install build build-soroban-rpc build-libpreflight lint lint-changes build-snapshot typescript-bindings-fixtures diff --git a/RELEASING.md b/RELEASING.md new file mode 100644 index 00000000..09a6a7b4 --- /dev/null +++ b/RELEASING.md @@ -0,0 +1,5 @@ +# Releasing + +The process for how to release the crates in this repository are documented here: + +https://github.com/stellar/actions/blob/main/README-rust-release.md diff --git a/cmd/crates/soroban-spec-json/Cargo.toml b/cmd/crates/soroban-spec-json/Cargo.toml new file mode 100644 index 00000000..04051ea6 --- /dev/null +++ b/cmd/crates/soroban-spec-json/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "soroban-spec-json" +description = "Soroban contract spec utilities for generating JSON." 
+homepage = "https://github.com/stellar/soroban-tools" +repository = "https://github.com/stellar/soroban-tools" +authors = ["Stellar Development Foundation "] +readme = "README.md" +license = "Apache-2.0" +version.workspace = true +edition = "2021" +rust-version.workspace = true + +[dependencies] +soroban-spec = { workspace = true } +thiserror = "1.0.32" +serde = "1.0.82" +serde_derive = "1.0.82" +serde_json = "1.0.82" +sha2 = "0.9.9" + +[dependencies.stellar-xdr] +workspace = true +features = ["curr", "std", "serde"] + +[dev_dependencies] +pretty_assertions = "1.2.1" diff --git a/cmd/crates/soroban-spec-json/README.md b/cmd/crates/soroban-spec-json/README.md new file mode 100644 index 00000000..cf3c38c4 --- /dev/null +++ b/cmd/crates/soroban-spec-json/README.md @@ -0,0 +1,3 @@ +# soroban-spec-json + +Generation of JSON that describes a Soroban contract specification / interface. diff --git a/cmd/crates/soroban-spec-json/src/lib.rs b/cmd/crates/soroban-spec-json/src/lib.rs new file mode 100644 index 00000000..cdc64665 --- /dev/null +++ b/cmd/crates/soroban-spec-json/src/lib.rs @@ -0,0 +1,229 @@ +use std::{fs, io}; + +pub mod types; + +use sha2::{Digest, Sha256}; + +use stellar_xdr::curr::ScSpecEntry; +use types::Entry; + +use soroban_spec::read::{from_wasm, FromWasmError}; + +#[derive(thiserror::Error, Debug)] +pub enum GenerateFromFileError { + #[error("reading file: {0}")] + Io(io::Error), + #[error("sha256 does not match, expected: {expected}")] + VerifySha256 { expected: String }, + #[error("parsing contract spec: {0}")] + Parse(stellar_xdr::curr::Error), + #[error("getting contract spec: {0}")] + GetSpec(FromWasmError), +} + +/// # Errors +/// +/// Will return an error if the file cannot be read, or the wasm cannot be parsed. +pub fn generate_from_file( + file: &str, + verify_sha256: Option<&str>, +) -> Result { + // Read file. + let wasm = fs::read(file).map_err(GenerateFromFileError::Io)?; + + // Produce hash for file. + let sha256 = Sha256::digest(&wasm); + let sha256 = format!("{sha256:x}"); + + if let Some(verify_sha256) = verify_sha256 { + if verify_sha256 != sha256 { + return Err(GenerateFromFileError::VerifySha256 { expected: sha256 }); + } + } + + // Generate code. + let json = generate_from_wasm(&wasm).map_err(GenerateFromFileError::GetSpec)?; + Ok(json) +} + +/// # Errors +/// +/// Will return an error if the wasm cannot be parsed. +pub fn generate_from_wasm(wasm: &[u8]) -> Result { + let spec = from_wasm(wasm)?; + let json = generate(&spec); + Ok(json) +} + +/// # Panics +/// +/// If `serde_json::to_string_pretty` fails to serialize the spec entries. 
+pub fn generate(spec: &[ScSpecEntry]) -> String { + let collected: Vec<_> = spec.iter().map(Entry::from).collect(); + serde_json::to_string_pretty(&collected).expect("serialization of the spec entries should not have any failure cases as all keys are strings and the serialize implementations are derived") +} + +#[allow(clippy::too_many_lines)] +#[cfg(test)] +mod test { + use pretty_assertions::assert_eq; + use soroban_spec::read::from_wasm; + + use super::generate; + + const EXAMPLE_WASM: &[u8] = + include_bytes!("../../../../target/wasm32-unknown-unknown/test-wasms/test_udt.wasm"); + + #[test] + fn example() { + let entries = from_wasm(EXAMPLE_WASM).unwrap(); + let json = generate(&entries); + assert_eq!( + json, + r#"[ + { + "type": "enum", + "doc": "", + "name": "UdtEnum2", + "cases": [ + { + "doc": "", + "name": "A", + "value": 10 + }, + { + "doc": "", + "name": "B", + "value": 15 + } + ] + }, + { + "type": "union", + "doc": "", + "name": "UdtEnum", + "cases": [ + { + "doc": "", + "name": "UdtA", + "values": [] + }, + { + "doc": "", + "name": "UdtB", + "values": [ + { + "type": "custom", + "name": "UdtStruct" + } + ] + }, + { + "doc": "", + "name": "UdtC", + "values": [ + { + "type": "custom", + "name": "UdtEnum2" + } + ] + }, + { + "doc": "", + "name": "UdtD", + "values": [ + { + "type": "custom", + "name": "UdtTuple" + } + ] + } + ] + }, + { + "type": "struct", + "doc": "", + "name": "UdtTuple", + "fields": [ + { + "doc": "", + "name": "0", + "value": { + "type": "i64" + } + }, + { + "doc": "", + "name": "1", + "value": { + "type": "vec", + "element": { + "type": "i64" + } + } + } + ] + }, + { + "type": "struct", + "doc": "", + "name": "UdtStruct", + "fields": [ + { + "doc": "", + "name": "a", + "value": { + "type": "i64" + } + }, + { + "doc": "", + "name": "b", + "value": { + "type": "i64" + } + }, + { + "doc": "", + "name": "c", + "value": { + "type": "vec", + "element": { + "type": "i64" + } + } + } + ] + }, + { + "type": "function", + "doc": "", + "name": "add", + "inputs": [ + { + "doc": "", + "name": "a", + "value": { + "type": "custom", + "name": "UdtEnum" + } + }, + { + "doc": "", + "name": "b", + "value": { + "type": "custom", + "name": "UdtEnum" + } + } + ], + "outputs": [ + { + "type": "i64" + } + ] + } +]"#, + ); + } +} diff --git a/cmd/crates/soroban-spec-json/src/types.rs b/cmd/crates/soroban-spec-json/src/types.rs new file mode 100644 index 00000000..8e3b2177 --- /dev/null +++ b/cmd/crates/soroban-spec-json/src/types.rs @@ -0,0 +1,245 @@ +use serde::Serialize; +use stellar_xdr::curr::{ + ScSpecEntry, ScSpecFunctionInputV0, ScSpecTypeDef, ScSpecUdtEnumCaseV0, + ScSpecUdtErrorEnumCaseV0, ScSpecUdtStructFieldV0, ScSpecUdtUnionCaseV0, +}; + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct StructField { + pub doc: String, + pub name: String, + pub value: Type, +} + +impl From<&ScSpecUdtStructFieldV0> for StructField { + fn from(f: &ScSpecUdtStructFieldV0) -> Self { + StructField { + doc: f.doc.to_utf8_string_lossy(), + name: f.name.to_utf8_string_lossy(), + value: (&f.type_).into(), + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FunctionInput { + pub doc: String, + pub name: String, + pub value: Type, +} + +impl From<&ScSpecFunctionInputV0> for FunctionInput { + fn from(f: &ScSpecFunctionInputV0) -> Self { + FunctionInput { + doc: f.doc.to_utf8_string_lossy(), + name: f.name.to_utf8_string_lossy(), + value: 
(&f.type_).into(), + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct UnionCase { + pub doc: String, + pub name: String, + pub values: Vec, +} + +impl From<&ScSpecUdtUnionCaseV0> for UnionCase { + fn from(c: &ScSpecUdtUnionCaseV0) -> Self { + let (doc, name, values) = match c { + ScSpecUdtUnionCaseV0::VoidV0(v) => ( + v.doc.to_utf8_string_lossy(), + v.name.to_utf8_string_lossy(), + vec![], + ), + ScSpecUdtUnionCaseV0::TupleV0(t) => ( + t.doc.to_utf8_string_lossy(), + t.name.to_utf8_string_lossy(), + t.type_.iter().map(Type::from).collect(), + ), + }; + UnionCase { doc, name, values } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct EnumCase { + pub doc: String, + pub name: String, + pub value: u32, +} + +impl From<&ScSpecUdtEnumCaseV0> for EnumCase { + fn from(c: &ScSpecUdtEnumCaseV0) -> Self { + EnumCase { + doc: c.doc.to_utf8_string_lossy(), + name: c.name.to_utf8_string_lossy(), + value: c.value, + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ErrorEnumCase { + pub doc: String, + pub name: String, + pub value: u32, +} + +impl From<&ScSpecUdtErrorEnumCaseV0> for EnumCase { + fn from(c: &ScSpecUdtErrorEnumCaseV0) -> Self { + EnumCase { + doc: c.doc.to_utf8_string_lossy(), + name: c.name.to_utf8_string_lossy(), + value: c.value, + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(tag = "type")] +#[serde(rename_all = "camelCase")] +pub enum Type { + Void, + Val, + U64, + I64, + U32, + I32, + U128, + I128, + U256, + I256, + Bool, + Symbol, + Error, + Bytes, + String, + Address, + Timepoint, + Duration, + Map { key: Box, value: Box }, + Option { value: Box }, + Result { value: Box, error: Box }, + Vec { element: Box }, + BytesN { n: u32 }, + Tuple { elements: Vec }, + Custom { name: String }, +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(tag = "type")] +#[serde(rename_all = "camelCase")] +pub enum Entry { + Function { + doc: String, + name: String, + inputs: Vec, + outputs: Vec, + }, + Struct { + doc: String, + name: String, + fields: Vec, + }, + Union { + doc: String, + name: String, + cases: Vec, + }, + Enum { + doc: String, + name: String, + cases: Vec, + }, + ErrorEnum { + doc: String, + name: String, + cases: Vec, + }, +} + +impl From<&ScSpecTypeDef> for Type { + fn from(spec: &ScSpecTypeDef) -> Self { + match spec { + ScSpecTypeDef::Map(map) => Type::Map { + key: Box::new(Type::from(map.key_type.as_ref())), + value: Box::new(Type::from(map.value_type.as_ref())), + }, + ScSpecTypeDef::Option(opt) => Type::Option { + value: Box::new(Type::from(opt.value_type.as_ref())), + }, + ScSpecTypeDef::Result(res) => Type::Result { + value: Box::new(Type::from(res.ok_type.as_ref())), + error: Box::new(Type::from(res.error_type.as_ref())), + }, + ScSpecTypeDef::Tuple(tuple) => Type::Tuple { + elements: tuple.value_types.iter().map(Type::from).collect(), + }, + ScSpecTypeDef::Vec(vec) => Type::Vec { + element: Box::new(Type::from(vec.element_type.as_ref())), + }, + ScSpecTypeDef::Udt(udt) => Type::Custom { + name: udt.name.to_utf8_string_lossy(), + }, + ScSpecTypeDef::BytesN(b) => Type::BytesN { n: b.n }, + ScSpecTypeDef::Val => Type::Val, + ScSpecTypeDef::U64 => Type::U64, + ScSpecTypeDef::I64 => Type::I64, + ScSpecTypeDef::U32 => Type::U32, + ScSpecTypeDef::I32 => 
Type::I32, + ScSpecTypeDef::U128 => Type::U128, + ScSpecTypeDef::I128 => Type::I128, + ScSpecTypeDef::U256 => Type::U256, + ScSpecTypeDef::I256 => Type::I256, + ScSpecTypeDef::Bool => Type::Bool, + ScSpecTypeDef::Symbol => Type::Symbol, + ScSpecTypeDef::Error => Type::Error, + ScSpecTypeDef::Bytes => Type::Bytes, + ScSpecTypeDef::String => Type::String, + ScSpecTypeDef::Address => Type::Address, + ScSpecTypeDef::Void => Type::Void, + ScSpecTypeDef::Timepoint => Type::Timepoint, + ScSpecTypeDef::Duration => Type::Duration, + } + } +} + +impl From<&ScSpecEntry> for Entry { + fn from(spec: &ScSpecEntry) -> Self { + match spec { + ScSpecEntry::FunctionV0(f) => Entry::Function { + doc: f.doc.to_utf8_string_lossy(), + name: f.name.to_utf8_string_lossy(), + inputs: f.inputs.iter().map(FunctionInput::from).collect(), + outputs: f.outputs.iter().map(Type::from).collect(), + }, + ScSpecEntry::UdtStructV0(s) => Entry::Struct { + doc: s.doc.to_utf8_string_lossy(), + name: s.name.to_utf8_string_lossy(), + fields: s.fields.iter().map(StructField::from).collect(), + }, + ScSpecEntry::UdtUnionV0(u) => Entry::Union { + doc: u.doc.to_utf8_string_lossy(), + name: u.name.to_utf8_string_lossy(), + cases: u.cases.iter().map(UnionCase::from).collect(), + }, + ScSpecEntry::UdtEnumV0(e) => Entry::Enum { + doc: e.doc.to_utf8_string_lossy(), + name: e.name.to_utf8_string_lossy(), + cases: e.cases.iter().map(EnumCase::from).collect(), + }, + ScSpecEntry::UdtErrorEnumV0(e) => Entry::Enum { + doc: e.doc.to_utf8_string_lossy(), + name: e.name.to_utf8_string_lossy(), + cases: e.cases.iter().map(EnumCase::from).collect(), + }, + } + } +} diff --git a/cmd/crates/soroban-spec-tools/Cargo.toml b/cmd/crates/soroban-spec-tools/Cargo.toml new file mode 100644 index 00000000..61a32c13 --- /dev/null +++ b/cmd/crates/soroban-spec-tools/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "soroban-spec-tools" +description = "Tools for using a contract's XDR spec" +homepage = "https://github.com/stellar/soroban-tools" +repository = "https://github.com/stellar/soroban-tools" +authors = ["Stellar Development Foundation "] +license = "Apache-2.0" +readme = "README.md" +version.workspace = true +edition = "2021" +rust-version.workspace = true +autobins = false + + +[lib] +crate-type = ["rlib"] + + +[dependencies] +soroban-spec = { workspace = true } +stellar-strkey = { workspace = true } +stellar-xdr = { workspace = true, features = ["curr", "std", "serde"] } +serde_json = { workspace = true } +itertools = { workspace = true } +ethnum = { workspace = true } +hex = { workspace = true } +wasmparser = { workspace = true } +base64 = { workspace = true } +thiserror = "1.0.31" +# soroban-ledger-snapshot = { workspace = true } +# soroban-sdk = { workspace = true } +# sep5 = { workspace = true } + + +[dev-dependencies] +which = { workspace = true } +tokio = "1.28.1" diff --git a/cmd/crates/soroban-spec-tools/README.md b/cmd/crates/soroban-spec-tools/README.md new file mode 100644 index 00000000..d2b00654 --- /dev/null +++ b/cmd/crates/soroban-spec-tools/README.md @@ -0,0 +1,3 @@ +# soroban-spec-tools + +Tools and utilities for soroban specification / interface. 
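For context: the `soroban-spec-json` crate added above exposes `generate_from_file` / `generate_from_wasm`, which return the pretty-printed JSON spec exercised by the `test_udt` fixture test. Below is a minimal usage sketch, not part of this change set; the wasm path and the `main` wrapper are illustrative assumptions.

    // Sketch only: assumes `soroban-spec-json` is declared as a dependency and
    // that the test wasm has already been built (see `make build-test-wasms`).
    use soroban_spec_json::{generate_from_file, GenerateFromFileError};

    fn main() -> Result<(), GenerateFromFileError> {
        // The second argument optionally verifies the file's sha256 before
        // generating the JSON spec; `None` skips the check.
        let json = generate_from_file(
            "target/wasm32-unknown-unknown/test-wasms/test_udt.wasm",
            None,
        )?;
        println!("{json}");
        Ok(())
    }

Passing `Some(expected_sha256_hex)` instead of `None` makes the call fail with `GenerateFromFileError::VerifySha256` when the file's hash does not match, per the check in `generate_from_file`.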
diff --git a/cmd/crates/soroban-spec-tools/src/lib.rs b/cmd/crates/soroban-spec-tools/src/lib.rs new file mode 100644 index 00000000..7f310fd9 --- /dev/null +++ b/cmd/crates/soroban-spec-tools/src/lib.rs @@ -0,0 +1,1468 @@ +#![allow(clippy::missing_errors_doc, clippy::must_use_candidate)] +use std::str::FromStr; + +use itertools::Itertools; +use serde_json::{json, Value}; +use stellar_xdr::curr::{ + AccountId, BytesM, ContractExecutable, Error as XdrError, Hash, Int128Parts, Int256Parts, + PublicKey, ScAddress, ScBytes, ScContractInstance, ScMap, ScMapEntry, ScNonceKey, ScSpecEntry, + ScSpecFunctionV0, ScSpecTypeDef as ScType, ScSpecTypeMap, ScSpecTypeOption, ScSpecTypeResult, + ScSpecTypeTuple, ScSpecTypeUdt, ScSpecTypeVec, ScSpecUdtEnumV0, ScSpecUdtErrorEnumCaseV0, + ScSpecUdtErrorEnumV0, ScSpecUdtStructV0, ScSpecUdtUnionCaseTupleV0, ScSpecUdtUnionCaseV0, + ScSpecUdtUnionCaseVoidV0, ScSpecUdtUnionV0, ScString, ScSymbol, ScVal, ScVec, StringM, + UInt128Parts, UInt256Parts, Uint256, VecM, +}; + +pub mod utils; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("an unknown error occurred")] + Unknown, + #[error("Invalid pair {0:#?} {1:#?}")] + InvalidPair(ScVal, ScType), + #[error("value is not parseable to {0:#?}")] + InvalidValue(Option), + #[error("Unknown case {0} for {1}")] + EnumCase(String, String), + #[error("Enum {0} missing value for type {1}")] + EnumMissingSecondValue(String, String), + #[error("Enum {0} is illformed")] + IllFormedEnum(String), + #[error("Unknown const case {0}")] + EnumConst(u32), + #[error("Enum const value must be a u32 or smaller")] + EnumConstTooLarge(u64), + #[error("Missing Entry {0}")] + MissingEntry(String), + #[error("Missing Spec")] + MissingSpec, + #[error(transparent)] + Xdr(XdrError), + #[error(transparent)] + Serde(#[from] serde_json::Error), + #[error(transparent)] + Ethnum(#[from] core::num::ParseIntError), + + #[error("Missing key {0} in map")] + MissingKey(String), + #[error("Failed to convert {0} to number")] + FailedNumConversion(serde_json::Number), + #[error("First argument in an enum must be a sybmol")] + EnumFirstValueNotSymbol, + #[error("Failed to find enum case {0}")] + FailedToFindEnumCase(String), + #[error(transparent)] + FailedSilceToByte(#[from] std::array::TryFromSliceError), + #[error(transparent)] + Infallible(#[from] std::convert::Infallible), + #[error("Missing Error case {0}")] + MissingErrorCase(u32), + #[error(transparent)] + Spec(#[from] soroban_spec::read::FromWasmError), + #[error(transparent)] + Base64Spec(#[from] soroban_spec::read::ParseSpecBase64Error), +} + +#[derive(Default, Clone)] +pub struct Spec(pub Option>); + +impl TryInto for &[u8] { + type Error = soroban_spec::read::FromWasmError; + + fn try_into(self) -> Result { + let spec = soroban_spec::read::from_wasm(self)?; + Ok(Spec::new(spec)) + } +} + +impl Spec { + pub fn new(entries: Vec) -> Self { + Self(Some(entries)) + } + + pub fn from_wasm(wasm: &[u8]) -> Result { + let spec = soroban_spec::read::from_wasm(wasm)?; + Ok(Spec::new(spec)) + } + + pub fn parse_base64(base64: &str) -> Result { + let spec = soroban_spec::read::parse_base64(base64.as_bytes())?; + Ok(Spec::new(spec)) + } +} + +impl Spec { + /// # Errors + /// Could fail to find User Defined Type + pub fn doc(&self, name: &str, type_: &ScType) -> Result, Error> { + let mut str = match type_ { + ScType::Val + | ScType::U64 + | ScType::I64 + | ScType::U128 + | ScType::I128 + | ScType::U32 + | ScType::I32 + | ScType::Result(_) + | ScType::Vec(_) + | ScType::Map(_) + | ScType::Tuple(_) 
+ | ScType::BytesN(_) + | ScType::Symbol + | ScType::Error + | ScType::Bytes + | ScType::Void + | ScType::Timepoint + | ScType::Duration + | ScType::U256 + | ScType::I256 + | ScType::String + | ScType::Bool => String::new(), + ScType::Address => String::from( + "Can be public key (G13..), a contract hash (6c45307) or an identity (alice), ", + ), + ScType::Option(type_) => return self.doc(name, &type_.value_type), + ScType::Udt(ScSpecTypeUdt { name }) => { + let spec_type = self.find(&name.to_utf8_string_lossy())?; + match spec_type { + ScSpecEntry::FunctionV0(ScSpecFunctionV0 { doc, .. }) + | ScSpecEntry::UdtStructV0(ScSpecUdtStructV0 { doc, .. }) + | ScSpecEntry::UdtUnionV0(ScSpecUdtUnionV0 { doc, .. }) + | ScSpecEntry::UdtEnumV0(ScSpecUdtEnumV0 { doc, .. }) + | ScSpecEntry::UdtErrorEnumV0(ScSpecUdtErrorEnumV0 { doc, .. }) => doc, + } + .to_utf8_string_lossy() + } + }; + if let Some(mut ex) = self.example(type_) { + if ex.contains(' ') { + ex = format!("'{ex}'"); + } else if ex.contains('"') { + ex = ex.replace('"', ""); + } + if matches!(type_, ScType::Bool) { + ex = String::new(); + } + let sep = if str.is_empty() { "" } else { "\n" }; + str = format!("{str}{sep}Example:\n --{name} {ex}"); + if ex.contains('"') {} + } + if str.is_empty() { + Ok(None) + } else { + Ok(Some(Box::leak(str.into_boxed_str()))) + } + } + + /// # Errors + /// + /// Might return errors + pub fn find(&self, name: &str) -> Result<&ScSpecEntry, Error> { + self.0 + .as_ref() + .and_then(|specs| { + specs.iter().find(|e| { + let entry_name = match e { + ScSpecEntry::FunctionV0(x) => x.name.to_utf8_string_lossy(), + ScSpecEntry::UdtStructV0(x) => x.name.to_utf8_string_lossy(), + ScSpecEntry::UdtUnionV0(x) => x.name.to_utf8_string_lossy(), + ScSpecEntry::UdtEnumV0(x) => x.name.to_utf8_string_lossy(), + ScSpecEntry::UdtErrorEnumV0(x) => x.name.to_utf8_string_lossy(), + }; + name == entry_name + }) + }) + .ok_or_else(|| Error::MissingEntry(name.to_owned())) + } + + /// # Errors + /// + /// Might return errors + pub fn find_function(&self, name: &str) -> Result<&ScSpecFunctionV0, Error> { + match self.find(name)? { + ScSpecEntry::FunctionV0(f) => Ok(f), + _ => Err(Error::MissingEntry(name.to_owned())), + } + } + // + /// # Errors + /// + pub fn find_functions(&self) -> Result, Error> { + Ok(self + .0 + .as_ref() + .ok_or(Error::MissingSpec)? + .iter() + .filter_map(|e| match e { + ScSpecEntry::FunctionV0(x) => Some(x), + _ => None, + })) + } + + /// # Errors + /// + pub fn find_error_type(&self, value: u32) -> Result<&ScSpecUdtErrorEnumCaseV0, Error> { + if let ScSpecEntry::UdtErrorEnumV0(ScSpecUdtErrorEnumV0 { cases, .. }) = + self.find("Error")? 
+ { + if let Some(case) = cases.iter().find(|case| value == case.value) { + return Ok(case); + } + } + Err(Error::MissingErrorCase(value)) + } + + /// # Errors + /// + /// Might return errors + pub fn from_string_primitive(s: &str, t: &ScType) -> Result { + Self::default().from_string(s, t) + } + + /// # Errors + /// + /// Might return errors + #[allow(clippy::wrong_self_convention)] + pub fn from_string(&self, s: &str, t: &ScType) -> Result { + if let ScType::Option(b) = t { + if s == "null" { + return Ok(ScVal::Void); + } + let ScSpecTypeOption { value_type } = b.as_ref().clone(); + let v = value_type.as_ref().clone(); + return self.from_string(s, &v); + } + // Parse as string and for special types assume Value::String + serde_json::from_str(s) + .map_or_else( + |e| match t { + ScType::Symbol + | ScType::String + | ScType::Bytes + | ScType::BytesN(_) + | ScType::U256 + | ScType::I256 + | ScType::U128 + | ScType::I128 + | ScType::Address => Ok(Value::String(s.to_owned())), + ScType::Udt(ScSpecTypeUdt { name }) + if matches!( + self.find(&name.to_utf8_string_lossy())?, + ScSpecEntry::UdtUnionV0(_) | ScSpecEntry::UdtStructV0(_) + ) => + { + Ok(Value::String(s.to_owned())) + } + _ => Err(Error::Serde(e)), + }, + |val| match t { + ScType::U128 | ScType::I128 | ScType::U256 | ScType::I256 => { + Ok(Value::String(s.to_owned())) + } + _ => Ok(val), + }, + ) + .and_then(|raw| self.from_json(&raw, t)) + } + + /// # Errors + /// + /// Might return errors + #[allow(clippy::wrong_self_convention)] + pub fn from_json(&self, v: &Value, t: &ScType) -> Result { + let val: ScVal = match (t, v) { + ( + ScType::Bool + | ScType::U128 + | ScType::I128 + | ScType::U256 + | ScType::I256 + | ScType::I32 + | ScType::I64 + | ScType::U32 + | ScType::U64 + | ScType::String + | ScType::Symbol + | ScType::Address + | ScType::Bytes + | ScType::BytesN(_), + _, + ) => from_json_primitives(v, t)?, + + // Vec parsing + (ScType::Vec(elem), Value::Array(raw)) => { + let converted: ScVec = raw + .iter() + .map(|item| self.from_json(item, &elem.element_type)) + .collect::, Error>>()? 
+ .try_into() + .map_err(Error::Xdr)?; + ScVal::Vec(Some(converted)) + } + + // Map parsing + (ScType::Map(map), Value::Object(raw)) => self.parse_map(map, raw)?, + + // Option parsing + (ScType::Option(_), Value::Null) => ScVal::Void, + (ScType::Option(elem), v) => self.from_json(v, &elem.value_type)?, + + // Tuple parsing + (ScType::Tuple(elem), Value::Array(raw)) => self.parse_tuple(t, elem, raw)?, + + // User defined types parsing + (ScType::Udt(ScSpecTypeUdt { name }), _) => self.parse_udt(name, v)?, + + // TODO: Implement the rest of these + (_, raw) => serde_json::from_value(raw.clone()).map_err(Error::Serde)?, + }; + Ok(val) + } + + fn parse_udt(&self, name: &StringM<60>, value: &Value) -> Result { + let name = &name.to_utf8_string_lossy(); + match (self.find(name)?, value) { + (ScSpecEntry::UdtStructV0(strukt), Value::Object(map)) => { + if strukt + .fields + .iter() + .any(|f| f.name.to_utf8_string_lossy() == "0") + { + self.parse_tuple_strukt( + strukt, + &(0..map.len()) + .map(|i| map.get(&i.to_string()).unwrap().clone()) + .collect::>(), + ) + } else { + self.parse_strukt(strukt, map) + } + } + (ScSpecEntry::UdtStructV0(strukt), Value::Array(arr)) => { + self.parse_tuple_strukt(strukt, arr) + } + ( + ScSpecEntry::UdtUnionV0(union), + val @ (Value::Array(_) | Value::String(_) | Value::Object(_)), + ) => self.parse_union(union, val), + (ScSpecEntry::UdtEnumV0(enum_), Value::Number(num)) => parse_const_enum(num, enum_), + (s, v) => todo!("Not implemented for {s:#?} {v:#?}"), + } + } + + fn parse_tuple_strukt( + &self, + strukt: &ScSpecUdtStructV0, + array: &[Value], + ) -> Result { + let items = strukt + .fields + .to_vec() + .iter() + .zip(array.iter()) + .map(|(f, v)| { + let val = self.from_json(v, &f.type_)?; + Ok(val) + }) + .collect::, Error>>()?; + Ok(ScVal::Vec(Some(items.try_into().map_err(Error::Xdr)?))) + } + + fn parse_strukt( + &self, + strukt: &ScSpecUdtStructV0, + map: &serde_json::Map, + ) -> Result { + let items = strukt + .fields + .to_vec() + .iter() + .map(|f| { + let name = &f.name.to_utf8_string_lossy(); + let v = map + .get(name) + .ok_or_else(|| Error::MissingKey(name.clone()))?; + let val = self.from_json(v, &f.type_)?; + let key = StringM::from_str(name).unwrap(); + Ok(ScMapEntry { + key: ScVal::Symbol(key.into()), + val, + }) + }) + .collect::, Error>>()?; + let map = ScMap::sorted_from(items).map_err(Error::Xdr)?; + Ok(ScVal::Map(Some(map))) + } + + fn parse_union(&self, union: &ScSpecUdtUnionV0, value: &Value) -> Result { + let (enum_case, rest) = match value { + Value::String(s) => (s, None), + Value::Object(o) if o.len() == 1 => { + let res = o.values().next().map(|v| match v { + Value::Object(obj) if obj.contains_key("0") => { + let len = obj.len(); + Value::Array( + (0..len) + .map(|i| obj.get(&i.to_string()).unwrap().clone()) + .collect::>(), + ) + } + _ => v.clone(), + }); + (o.keys().next().unwrap(), res) + } + _ => todo!(), + }; + let case = union + .cases + .iter() + .find(|c| { + let name = match c { + ScSpecUdtUnionCaseV0::VoidV0(v) => &v.name, + ScSpecUdtUnionCaseV0::TupleV0(v) => &v.name, + }; + enum_case == &name.to_utf8_string_lossy() + }) + .ok_or_else(|| { + Error::EnumCase(enum_case.to_string(), union.name.to_utf8_string_lossy()) + })?; + + let mut res = vec![ScVal::Symbol(ScSymbol( + enum_case.try_into().map_err(Error::Xdr)?, + ))]; + + match (case, rest) { + (ScSpecUdtUnionCaseV0::VoidV0(_), _) | (ScSpecUdtUnionCaseV0::TupleV0(_), None) => (), + (ScSpecUdtUnionCaseV0::TupleV0(ScSpecUdtUnionCaseTupleV0 { type_, .. 
}), Some(arr)) + if type_.len() == 1 => + { + res.push(self.from_json(&arr, &type_[0])?); + } + ( + ScSpecUdtUnionCaseV0::TupleV0(ScSpecUdtUnionCaseTupleV0 { type_, .. }), + Some(Value::Array(arr)), + ) => { + res.extend( + arr.iter() + .zip(type_.iter()) + .map(|(elem, ty)| self.from_json(elem, ty)) + .collect::, _>>()?, + ); + } + (ScSpecUdtUnionCaseV0::TupleV0(ScSpecUdtUnionCaseTupleV0 { .. }), Some(_)) => {} + }; + Ok(ScVal::Vec(Some(res.try_into().map_err(Error::Xdr)?))) + } + + fn parse_tuple( + &self, + t: &ScType, + tuple: &ScSpecTypeTuple, + items: &[Value], + ) -> Result { + let ScSpecTypeTuple { value_types } = tuple; + if items.len() != value_types.len() { + return Err(Error::InvalidValue(Some(t.clone()))); + }; + let parsed: Result, Error> = items + .iter() + .zip(value_types.iter()) + .map(|(item, t)| self.from_json(item, t)) + .collect(); + let converted: ScVec = parsed?.try_into().map_err(Error::Xdr)?; + Ok(ScVal::Vec(Some(converted))) + } + + fn parse_map( + &self, + map: &ScSpecTypeMap, + value_map: &serde_json::Map, + ) -> Result { + let ScSpecTypeMap { + key_type, + value_type, + } = map; + // TODO: What do we do if the expected key_type is not a string or symbol? + let parsed: Result, Error> = value_map + .iter() + .map(|(k, v)| -> Result { + let key = self.from_string(k, key_type)?; + let val = self.from_json(v, value_type)?; + Ok(ScMapEntry { key, val }) + }) + .collect(); + Ok(ScVal::Map(Some( + ScMap::sorted_from(parsed?).map_err(Error::Xdr)?, + ))) + } +} + +impl Spec { + /// # Errors + /// + /// Might return `Error::InvalidValue` + /// + /// # Panics + /// + /// May panic + pub fn xdr_to_json(&self, val: &ScVal, output: &ScType) -> Result { + Ok(match (val, output) { + (ScVal::Void, ScType::Val | ScType::Option(_) | ScType::Tuple(_)) + | (ScVal::Map(None) | ScVal::Vec(None), ScType::Option(_)) => Value::Null, + (ScVal::Bool(_), ScType::Bool) + | (ScVal::Void, ScType::Void) + | (ScVal::String(_), ScType::String) + | (ScVal::Symbol(_), ScType::Symbol) + | (ScVal::U64(_), ScType::U64) + | (ScVal::I64(_), ScType::I64) + | (ScVal::U32(_), ScType::U32) + | (ScVal::I32(_), ScType::I32) + | (ScVal::U128(_), ScType::U128) + | (ScVal::I128(_), ScType::I128) + | (ScVal::U256(_), ScType::U256) + | (ScVal::I256(_), ScType::I256) + | (ScVal::Duration(_), ScType::Duration) + | (ScVal::Timepoint(_), ScType::Timepoint) + | ( + ScVal::ContractInstance(_) + | ScVal::LedgerKeyContractInstance + | ScVal::LedgerKeyNonce(_), + _, + ) + | (ScVal::Address(_), ScType::Address) + | (ScVal::Bytes(_), ScType::Bytes | ScType::BytesN(_)) => to_json(val)?, + + (val, ScType::Result(inner)) => self.xdr_to_json(val, &inner.ok_type)?, + + (val, ScType::Option(inner)) => self.xdr_to_json(val, &inner.value_type)?, + (ScVal::Map(Some(_)) | ScVal::Vec(Some(_)) | ScVal::U32(_), type_) => { + self.sc_object_to_json(val, type_)? 
+ } + + (ScVal::Error(_), ScType::Error) => todo!(), + (v, typed) => todo!("{v:#?} doesn't have a matching {typed:#?}"), + }) + } + + /// # Errors + /// + /// Might return an error + pub fn vec_m_to_json( + &self, + vec_m: &VecM, + type_: &ScType, + ) -> Result { + Ok(Value::Array( + vec_m + .to_vec() + .iter() + .map(|sc_val| self.xdr_to_json(sc_val, type_)) + .collect::, Error>>()?, + )) + } + + /// # Errors + /// + /// Might return an error + pub fn sc_map_to_json(&self, sc_map: &ScMap, type_: &ScSpecTypeMap) -> Result { + let v = sc_map + .iter() + .map(|ScMapEntry { key, val }| { + let key_s = self.xdr_to_json(key, &type_.key_type)?.to_string(); + let val_value = self.xdr_to_json(val, &type_.value_type)?; + Ok((key_s, val_value)) + }) + .collect::, Error>>()?; + Ok(Value::Object(v)) + } + + /// # Errors + /// + /// Might return an error + /// + /// # Panics + /// + /// May panic + pub fn udt_to_json(&self, name: &StringM<60>, sc_obj: &ScVal) -> Result { + let name = &name.to_utf8_string_lossy(); + let udt = self.find(name)?; + Ok(match (sc_obj, udt) { + (ScVal::Map(Some(map)), ScSpecEntry::UdtStructV0(strukt)) => serde_json::Value::Object( + strukt + .fields + .iter() + .zip(map.iter()) + .map(|(field, entry)| { + let val = self.xdr_to_json(&entry.val, &field.type_)?; + Ok((field.name.to_utf8_string_lossy(), val)) + }) + .collect::, Error>>()?, + ), + (ScVal::Vec(Some(vec_)), ScSpecEntry::UdtStructV0(strukt)) => Value::Array( + strukt + .fields + .iter() + .zip(vec_.iter()) + .map(|(field, entry)| self.xdr_to_json(entry, &field.type_)) + .collect::, Error>>()?, + ), + (ScVal::Vec(Some(vec_)), ScSpecEntry::UdtUnionV0(union)) => { + let v = vec_.to_vec(); + // let val = &v[0]; + let (first, rest) = match v.split_at(1) { + ([first], []) => (first, None), + ([first], rest) => (first, Some(rest)), + _ => return Err(Error::IllFormedEnum(union.name.to_utf8_string_lossy())), + }; + + let ScVal::Symbol(case_name) = first else { + return Err(Error::EnumFirstValueNotSymbol); + }; + let case = union + .cases + .iter() + .find(|case| { + let name = match case { + ScSpecUdtUnionCaseV0::VoidV0(v) => &v.name, + ScSpecUdtUnionCaseV0::TupleV0(v) => &v.name, + }; + name.as_vec() == case_name.as_vec() + }) + .ok_or_else(|| Error::FailedToFindEnumCase(case_name.to_utf8_string_lossy()))?; + + let case_name = case_name.to_utf8_string_lossy(); + match case { + ScSpecUdtUnionCaseV0::TupleV0(v) => { + let rest = rest.ok_or_else(|| { + Error::EnumMissingSecondValue( + union.name.to_utf8_string_lossy(), + case_name.clone(), + ) + })?; + let val = if v.type_.len() == 1 { + self.xdr_to_json(&rest[0], &v.type_[0])? + } else { + Value::Array( + v.type_ + .iter() + .zip(rest.iter()) + .map(|(type_, val)| self.xdr_to_json(val, type_)) + .collect::, Error>>()?, + ) + }; + + Value::Object([(case_name, val)].into_iter().collect()) + } + ScSpecUdtUnionCaseV0::VoidV0(_) => Value::String(case_name), + } + } + (ScVal::U32(v), ScSpecEntry::UdtEnumV0(_enum_)) => { + Value::Number(serde_json::Number::from(*v)) + } + (s, v) => todo!("Not implemented for {s:#?} {v:#?}"), + }) + } + + /// # Errors + /// + /// Might return an error + /// + /// # Panics + /// + /// Some types are not yet supported and will cause a panic if supplied + pub fn sc_object_to_json(&self, val: &ScVal, spec_type: &ScType) -> Result { + Ok(match (val, spec_type) { + (ScVal::Vec(Some(ScVec(vec_m))), ScType::Vec(type_)) => { + self.vec_m_to_json(vec_m, &type_.element_type)? 
+ } + (ScVal::Vec(Some(ScVec(vec_m))), ScType::Tuple(tuple_type)) => Value::Array( + vec_m + .iter() + .zip(tuple_type.value_types.iter()) + .map(|(v, t)| self.xdr_to_json(v, t)) + .collect::, _>>()?, + ), + ( + sc_obj @ (ScVal::Vec(_) | ScVal::Map(_) | ScVal::U32(_)), + ScType::Udt(ScSpecTypeUdt { name }), + ) => self.udt_to_json(name, sc_obj)?, + + (ScVal::Map(Some(map)), ScType::Map(map_type)) => self.sc_map_to_json(map, map_type)?, + + (ScVal::U64(u64_), ScType::U64) => Value::Number(serde_json::Number::from(*u64_)), + + (ScVal::I64(i64_), ScType::I64) => Value::Number(serde_json::Number::from(*i64_)), + (int @ ScVal::U128(_), ScType::U128) => { + // Always output u128s as strings + let v: u128 = int + .clone() + .try_into() + .map_err(|()| Error::InvalidValue(Some(ScType::U128)))?; + Value::String(v.to_string()) + } + + (int @ ScVal::I128(_), ScType::I128) => { + // Always output u128s as strings + let v: i128 = int + .clone() + .try_into() + .map_err(|()| Error::InvalidValue(Some(ScType::I128)))?; + Value::String(v.to_string()) + } + + (ScVal::Bytes(v), ScType::Bytes | ScType::BytesN(_)) => { + Value::String(to_lower_hex(v.as_slice())) + } + + (ScVal::Bytes(_), ScType::Udt(_)) => todo!(), + + (ScVal::ContractInstance(_), _) => todo!(), + + (ScVal::Address(v), ScType::Address) => sc_address_to_json(v), + + (ok_val, ScType::Result(result_type)) => { + let ScSpecTypeResult { ok_type, .. } = result_type.as_ref(); + self.xdr_to_json(ok_val, ok_type)? + } + + (x, y) => return Err(Error::InvalidPair(x.clone(), y.clone())), + }) + } +} + +/// # Errors +/// +/// Might return an error +pub fn from_string_primitive(s: &str, t: &ScType) -> Result { + Spec::from_string_primitive(s, t) +} + +fn parse_const_enum(num: &serde_json::Number, enum_: &ScSpecUdtEnumV0) -> Result { + let num = num + .as_u64() + .ok_or_else(|| Error::FailedNumConversion(num.clone()))?; + let num = u32::try_from(num).map_err(|_| Error::EnumConstTooLarge(num))?; + enum_ + .cases + .iter() + .find(|c| c.value == num) + .ok_or(Error::EnumConst(num)) + .map(|c| ScVal::U32(c.value)) +} + +/// # Errors +/// +/// Might return an error +#[allow(clippy::too_many_lines)] +pub fn from_json_primitives(v: &Value, t: &ScType) -> Result { + let val: ScVal = match (t, v) { + // Boolean parsing + (ScType::Bool, Value::Bool(true)) => ScVal::Bool(true), + (ScType::Bool, Value::Bool(false)) => ScVal::Bool(false), + + // Number parsing + (ScType::U128, Value::String(s)) => { + let val: u128 = u128::from_str(s) + .map(Into::into) + .map_err(|_| Error::InvalidValue(Some(t.clone())))?; + let bytes = val.to_be_bytes(); + let (hi, lo) = bytes.split_at(8); + ScVal::U128(UInt128Parts { + hi: u64::from_be_bytes(hi.try_into()?), + lo: u64::from_be_bytes(lo.try_into()?), + }) + } + + (ScType::I128, Value::String(s)) => { + let val: i128 = i128::from_str(s) + .map(Into::into) + .map_err(|_| Error::InvalidValue(Some(t.clone())))?; + let bytes = val.to_be_bytes(); + let (hi, lo) = bytes.split_at(8); + ScVal::I128(Int128Parts { + hi: i64::from_be_bytes(hi.try_into()?), + lo: u64::from_be_bytes(lo.try_into()?), + }) + } + + // Number parsing + (ScType::U256, Value::String(s)) => { + let (hi, lo) = ethnum::U256::from_str_prefixed(s)?.into_words(); + let hi_bytes = hi.to_be_bytes(); + let (hi_hi, hi_lo) = hi_bytes.split_at(8); + let lo_bytes = lo.to_be_bytes(); + let (lo_hi, lo_lo) = lo_bytes.split_at(8); + ScVal::U256(UInt256Parts { + hi_hi: u64::from_be_bytes(hi_hi.try_into()?), + hi_lo: u64::from_be_bytes(hi_lo.try_into()?), + lo_hi: 
u64::from_be_bytes(lo_hi.try_into()?), + lo_lo: u64::from_be_bytes(lo_lo.try_into()?), + }) + } + (ScType::I256, Value::String(s)) => { + let (hi, lo) = ethnum::I256::from_str_prefixed(s)?.into_words(); + let hi_bytes = hi.to_be_bytes(); + let (hi_hi, hi_lo) = hi_bytes.split_at(8); + let lo_bytes = lo.to_be_bytes(); + let (lo_hi, lo_lo) = lo_bytes.split_at(8); + ScVal::I256(Int256Parts { + hi_hi: i64::from_be_bytes(hi_hi.try_into()?), + hi_lo: u64::from_be_bytes(hi_lo.try_into()?), + lo_hi: u64::from_be_bytes(lo_hi.try_into()?), + lo_lo: u64::from_be_bytes(lo_lo.try_into()?), + }) + } + + (ScType::I32, Value::Number(n)) => ScVal::I32( + n.as_i64() + .ok_or_else(|| Error::InvalidValue(Some(t.clone())))? + .try_into() + .map_err(|_| Error::InvalidValue(Some(t.clone())))?, + ), + (ScType::U32, Value::Number(n)) => ScVal::U32( + n.as_u64() + .ok_or_else(|| Error::InvalidValue(Some(t.clone())))? + .try_into() + .map_err(|_| Error::InvalidValue(Some(t.clone())))?, + ), + (ScType::I64, Value::Number(n)) => ScVal::I64( + n.as_i64() + .ok_or_else(|| Error::InvalidValue(Some(t.clone())))?, + ), + (ScType::U64 | ScType::Timepoint | ScType::Duration, Value::Number(n)) => ScVal::U64( + n.as_u64() + .ok_or_else(|| Error::InvalidValue(Some(t.clone())))?, + ), + + // Symbol parsing + (ScType::Symbol, Value::String(s)) => ScVal::Symbol(ScSymbol( + s.as_bytes() + .try_into() + .map_err(|_| Error::InvalidValue(Some(t.clone())))?, + )), + + (ScType::Address, Value::String(s)) => sc_address_from_json(s)?, + + // Bytes parsing + (bytes @ ScType::BytesN(_), Value::Number(n)) => { + from_json_primitives(&Value::String(format!("{n}")), bytes)? + } + (ScType::BytesN(bytes), Value::String(s)) => ScVal::Bytes(ScBytes({ + if bytes.n == 32 { + // Bytes might be a strkey, try parsing it as one. Contract devs should use the new + // proper Address type, but for backwards compatibility some contracts might use a + // BytesN<32> to represent an Address. + if let Ok(key) = sc_address_from_json(s) { + return Ok(key); + } + } + // Bytes are not an address, just parse as a hex string + utils::padded_hex_from_str(s, bytes.n as usize) + .map_err(|_| Error::InvalidValue(Some(t.clone())))? + .try_into() + .map_err(|_| Error::InvalidValue(Some(t.clone())))? + })), + (ScType::Bytes, Value::Number(n)) => { + from_json_primitives(&Value::String(format!("{n}")), &ScType::Bytes)? + } + (ScType::Bytes, Value::String(s)) => ScVal::Bytes( + hex::decode(s) + .map_err(|_| Error::InvalidValue(Some(t.clone())))? + .try_into() + .map_err(|_| Error::InvalidValue(Some(t.clone())))?, + ), + (ScType::Bytes | ScType::BytesN(_), Value::Array(raw)) => { + let b: Result, Error> = raw + .iter() + .map(|item| { + item.as_u64() + .ok_or_else(|| Error::InvalidValue(Some(t.clone())))? + .try_into() + .map_err(|_| Error::InvalidValue(Some(t.clone()))) + }) + .collect(); + let converted: BytesM<{ u32::MAX }> = b?.try_into().map_err(Error::Xdr)?; + ScVal::Bytes(ScBytes(converted)) + } + + (ScType::String, Value::String(s)) => ScVal::String(ScString( + s.try_into() + .map_err(|_| Error::InvalidValue(Some(t.clone())))?, + )), + // Todo make proper error Which shouldn't exist + (_, raw) => serde_json::from_value(raw.clone())?, + }; + Ok(val) +} + +/// # Errors +/// +/// Might return an error +pub fn to_string(v: &ScVal) -> Result { + #[allow(clippy::match_same_arms)] + Ok(match v { + // If symbols are a top-level thing we omit the wrapping quotes + // TODO: Decide if this is a good idea or not. 
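The 128- and 256-bit branches above all follow the same recipe: parse the decimal string, take its big-endian bytes, and split them into 64-bit words (`hi`/`lo`, or `hi_hi` through `lo_lo`). A minimal standalone sketch of that round trip for `u128` (not taken from the crate's test suite):

```rust
fn main() {
    let v: u128 = 339_999_999_999_999_999_999_999_999_999u128;
    let bytes = v.to_be_bytes();        // [u8; 16], big-endian
    let (hi, lo) = bytes.split_at(8);   // two 8-byte halves
    let hi = u64::from_be_bytes(hi.try_into().unwrap());
    let lo = u64::from_be_bytes(lo.try_into().unwrap());
    // Recombining the two words recovers the original value, which is what
    // ScVal::U128(UInt128Parts { hi, lo }) encodes on the wire.
    assert_eq!(((hi as u128) << 64) | u128::from(lo), v);
}
```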
+ ScVal::Symbol(v) => std::str::from_utf8(v.as_slice()) + .map_err(|_| Error::InvalidValue(Some(ScType::Symbol)))? + .to_string(), + ScVal::LedgerKeyContractInstance => "LedgerKeyContractInstance".to_string(), + _ => serde_json::to_string(&to_json(v)?)?, + }) +} + +/// # Errors +/// +/// Might return an error +#[allow(clippy::too_many_lines)] +pub fn to_json(v: &ScVal) -> Result { + #[allow(clippy::match_same_arms)] + let val: Value = match v { + ScVal::Bool(b) => Value::Bool(*b), + ScVal::Void => Value::Null, + ScVal::LedgerKeyContractInstance => Value::String("LedgerKeyContractInstance".to_string()), + ScVal::U64(v) => Value::Number(serde_json::Number::from(*v)), + ScVal::Timepoint(tp) => Value::Number(serde_json::Number::from(tp.0)), + ScVal::Duration(d) => Value::Number(serde_json::Number::from(d.0)), + ScVal::I64(v) => Value::Number(serde_json::Number::from(*v)), + ScVal::U32(v) => Value::Number(serde_json::Number::from(*v)), + ScVal::I32(v) => Value::Number(serde_json::Number::from(*v)), + ScVal::Symbol(v) => Value::String( + std::str::from_utf8(v.as_slice()) + .map_err(|_| Error::InvalidValue(Some(ScType::Symbol)))? + .to_string(), + ), + ScVal::String(v) => Value::String( + std::str::from_utf8(v.as_slice()) + .map_err(|_| Error::InvalidValue(Some(ScType::Symbol)))? + .to_string(), + ), + ScVal::Vec(v) => { + let values: Result, Error> = v.as_ref().map_or_else( + || Ok(vec![]), + |v| { + v.iter() + .map(|item| -> Result { to_json(item) }) + .collect() + }, + ); + Value::Array(values?) + } + ScVal::Map(None) => Value::Object(serde_json::Map::with_capacity(0)), + ScVal::Map(Some(v)) => { + // TODO: What do we do if the key is not a string? + let mut m = serde_json::Map::::with_capacity(v.len()); + for ScMapEntry { key, val } in v.iter() { + let k: String = to_string(key)?; + let v: Value = to_json(val).map_err(|_| Error::InvalidValue(None))?; + m.insert(k, v); + } + Value::Object(m) + } + ScVal::Bytes(v) => Value::String(to_lower_hex(v.as_slice())), + ScVal::Address(v) => sc_address_to_json(v), + ScVal::U128(n) => { + let hi: [u8; 8] = n.hi.to_be_bytes(); + let lo: [u8; 8] = n.lo.to_be_bytes(); + let bytes = [hi, lo].concat(); + // Always output u128s as strings + let v = u128::from_be_bytes( + bytes + .as_slice() + .try_into() + .map_err(|_| Error::InvalidValue(Some(ScType::I128)))?, + ) + .to_string(); + Value::String(v) + } + ScVal::I128(n) => { + let hi: [u8; 8] = n.hi.to_be_bytes(); + let lo: [u8; 8] = n.lo.to_be_bytes(); + let bytes = [hi, lo].concat(); + // Always output u128s as strings + let v = i128::from_be_bytes( + bytes + .as_slice() + .try_into() + .map_err(|_| Error::InvalidValue(Some(ScType::I128)))?, + ) + .to_string(); + Value::String(v) + } + ScVal::U256(u256parts) => { + let bytes = [ + u256parts.hi_hi.to_be_bytes(), + u256parts.hi_lo.to_be_bytes(), + u256parts.lo_hi.to_be_bytes(), + u256parts.lo_lo.to_be_bytes(), + ] + .concat(); + let u256 = ethnum::U256::from_be_bytes( + bytes + .as_slice() + .try_into() + .map_err(|_| Error::InvalidValue(Some(ScType::U256)))?, + ); + Value::String(u256.to_string()) + } + ScVal::I256(i256parts) => { + let bytes = [ + i256parts.hi_hi.to_be_bytes(), + i256parts.hi_lo.to_be_bytes(), + i256parts.lo_hi.to_be_bytes(), + i256parts.lo_lo.to_be_bytes(), + ] + .concat(); + let i256 = ethnum::I256::from_be_bytes( + bytes + .as_slice() + .try_into() + .map_err(|_| Error::InvalidValue(Some(ScType::I256)))?, + ); + Value::String(i256.to_string()) + } + ScVal::ContractInstance(ScContractInstance { + executable: ContractExecutable::Wasm(hash), 
+ .. + }) => json!({ "hash": hash }), + ScVal::ContractInstance(ScContractInstance { + executable: ContractExecutable::StellarAsset, + .. + }) => json!({"SAC": true}), + ScVal::LedgerKeyNonce(ScNonceKey { nonce }) => { + Value::Number(serde_json::Number::from(*nonce)) + } + ScVal::Error(e) => serde_json::to_value(e)?, + }; + Ok(val) +} + +fn sc_address_to_json(v: &ScAddress) -> Value { + match v { + ScAddress::Account(AccountId(PublicKey::PublicKeyTypeEd25519(Uint256(k)))) => { + Value::String(stellar_strkey::ed25519::PublicKey(*k).to_string()) + } + ScAddress::Contract(Hash(h)) => Value::String(stellar_strkey::Contract(*h).to_string()), + } +} + +fn sc_address_from_json(s: &str) -> Result { + stellar_strkey::Strkey::from_string(s) + .map_err(|_| Error::InvalidValue(Some(ScType::Address))) + .map(|parsed| match parsed { + stellar_strkey::Strkey::PublicKeyEd25519(p) => Some(ScVal::Address( + ScAddress::Account(AccountId(PublicKey::PublicKeyTypeEd25519(Uint256(p.0)))), + )), + stellar_strkey::Strkey::Contract(c) => { + Some(ScVal::Address(ScAddress::Contract(Hash(c.0)))) + } + _ => None, + })? + .ok_or(Error::InvalidValue(Some(ScType::Address))) +} + +fn to_lower_hex(bytes: &[u8]) -> String { + let mut res = String::with_capacity(bytes.len()); + for b in bytes { + res.push_str(&format!("{b:02x}")); + } + res +} + +impl Spec { + #[must_use] + pub fn arg_value_name(&self, type_: &ScType, depth: usize) -> Option { + match type_ { + ScType::U64 => Some("u64".to_string()), + ScType::I64 => Some("i64".to_string()), + ScType::U128 => Some("u128".to_string()), + ScType::I128 => Some("i128".to_string()), + ScType::U32 => Some("u32".to_string()), + ScType::I32 => Some("i32".to_string()), + ScType::Bool => Some("bool".to_string()), + ScType::Symbol => Some("Symbol".to_string()), + ScType::Error => Some("Error".to_string()), + ScType::Bytes => Some("hex_bytes".to_string()), + ScType::Address => Some("Address".to_string()), + ScType::Void => Some("Null".to_string()), + ScType::Timepoint => Some("Timepoint".to_string()), + ScType::Duration => Some("Duration".to_string()), + ScType::U256 => Some("u256".to_string()), + ScType::I256 => Some("i256".to_string()), + ScType::String => Some("String".to_string()), + ScType::Option(val) => { + let ScSpecTypeOption { value_type } = val.as_ref(); + let inner = self.arg_value_name(value_type.as_ref(), depth + 1)?; + Some(format!("Option<{inner}>")) + } + ScType::Vec(val) => { + let ScSpecTypeVec { element_type } = val.as_ref(); + let inner = self.arg_value_name(element_type.as_ref(), depth + 1)?; + Some(format!("Array<{inner}>")) + } + ScType::Result(val) => { + let ScSpecTypeResult { + ok_type, + error_type, + } = val.as_ref(); + let ok = self.arg_value_name(ok_type.as_ref(), depth + 1)?; + let error = self.arg_value_name(error_type.as_ref(), depth + 1)?; + Some(format!("Result<{ok}, {error}>")) + } + ScType::Tuple(val) => { + let ScSpecTypeTuple { value_types } = val.as_ref(); + let names = value_types + .iter() + .map(|t| self.arg_value_name(t, depth + 1)) + .collect::>>()? + .join(", "); + Some(format!("Tuple<{names}>")) + } + ScType::Map(val) => { + let ScSpecTypeMap { + key_type, + value_type, + } = val.as_ref(); + let (key, val) = ( + self.arg_value_name(key_type.as_ref(), depth + 1)?, + self.arg_value_name(value_type.as_ref(), depth + 1)?, + ); + Some(format!("Map<{key}, {val}>")) + } + ScType::BytesN(t) => Some(format!("{}_hex_bytes", t.n)), + ScType::Udt(ScSpecTypeUdt { name }) => { + match self.find(&name.to_utf8_string_lossy()).ok()? 
{ + ScSpecEntry::UdtStructV0(ScSpecUdtStructV0 { fields, .. }) + if fields + .first() + .map(|f| f.name.to_utf8_string_lossy() == "0") + .unwrap_or_default() => + { + let fields = fields + .iter() + .map(|t| self.arg_value_name(&t.type_, depth + 1)) + .collect::>>()? + .join(", "); + Some(format!("[{fields}]")) + } + ScSpecEntry::UdtStructV0(strukt) => self.arg_value_udt(strukt, depth), + ScSpecEntry::UdtUnionV0(union) => self.arg_value_union(union, depth), + ScSpecEntry::UdtEnumV0(enum_) => Some(arg_value_enum(enum_)), + ScSpecEntry::FunctionV0(_) | ScSpecEntry::UdtErrorEnumV0(_) => None, + } + } + // No specific value name for these yet. + ScType::Val => None, + } + } + + fn arg_value_udt(&self, strukt: &ScSpecUdtStructV0, depth: usize) -> Option { + let inner = strukt + .fields + .iter() + .map(|f| (f.name.to_utf8_string_lossy(), &f.type_)) + .map(|(name, type_)| { + let type_ = self.arg_value_name(type_, depth + 1)?; + Some(format!("{name}: {type_}")) + }) + .collect::>>()? + .join(", "); + Some(format!("{{ {inner} }}")) + } + + fn arg_value_union(&self, union: &ScSpecUdtUnionV0, depth: usize) -> Option { + union + .cases + .iter() + .map(|f| { + Some(match f { + ScSpecUdtUnionCaseV0::VoidV0(ScSpecUdtUnionCaseVoidV0 { name, .. }) => { + name.to_utf8_string_lossy() + } + ScSpecUdtUnionCaseV0::TupleV0(ScSpecUdtUnionCaseTupleV0 { + name, + type_, + .. + }) => format!( + "{}({})", + name.to_utf8_string_lossy(), + type_ + .iter() + .map(|type_| self.arg_value_name(type_, depth + 1)) + .collect::>>()? + .join(",") + ), + }) + }) + .collect::>>() + .map(|v| v.join(" | ")) + } +} + +fn arg_value_enum(enum_: &ScSpecUdtEnumV0) -> String { + enum_ + .cases + .iter() + .map(|case| case.value.to_string()) + .join(" | ") +} + +// Example implementation +impl Spec { + #[must_use] + pub fn example(&self, type_: &ScType) -> Option { + match type_ { + ScType::U64 => Some("42".to_string()), + ScType::I64 => Some("-42".to_string()), + ScType::U128 => Some("\"1000\"".to_string()), + ScType::I128 => Some("\"-100\"".to_string()), + ScType::U32 => Some("1".to_string()), + ScType::I32 => Some("-1".to_string()), + ScType::Bool => Some("true".to_string()), + ScType::Symbol => Some("\"hello\"".to_string()), + ScType::Error => Some("Error".to_string()), + ScType::Bytes => Some("\"beefface123\"".to_string()), + ScType::Address => { + Some("\"GDIY6AQQ75WMD4W46EYB7O6UYMHOCGQHLAQGQTKHDX4J2DYQCHVCR4W4\"".to_string()) + } + ScType::Void => Some("null".to_string()), + ScType::Timepoint => Some("1234".to_string()), + ScType::Duration => Some("9999".to_string()), + ScType::U256 => Some("\"2000\"".to_string()), + ScType::I256 => Some("\"-20000\"".to_string()), + ScType::String => Some("\"hello world\"".to_string()), + ScType::Option(val) => { + let ScSpecTypeOption { value_type } = val.as_ref(); + self.example(value_type.as_ref()) + } + ScType::Vec(val) => { + let ScSpecTypeVec { element_type } = val.as_ref(); + let inner = self.example(element_type.as_ref())?; + Some(format!("[ {inner} ]")) + } + ScType::Result(val) => { + let ScSpecTypeResult { + ok_type, + error_type, + } = val.as_ref(); + let ok = self.example(ok_type.as_ref())?; + let error = self.example(error_type.as_ref())?; + Some(format!("Result<{ok}, {error}>")) + } + ScType::Tuple(val) => { + let ScSpecTypeTuple { value_types } = val.as_ref(); + let names = value_types + .iter() + .map(|t| self.example(t)) + .collect::>>()? 
+ .join(", "); + Some(format!("[{names}]")) + } + ScType::Map(map) => { + let ScSpecTypeMap { + key_type, + value_type, + } = map.as_ref(); + let (mut key, val) = ( + self.example(key_type.as_ref())?, + self.example(value_type.as_ref())?, + ); + if !matches!(key_type.as_ref(), ScType::Symbol) { + key = format!("\"{key}\""); + } + Some(format!("{{ {key}: {val} }}")) + } + ScType::BytesN(n) => { + let n = n.n as usize; + let res = if n % 2 == 0 { + "ef".repeat(n) + } else { + let mut s = "ef".repeat(n - 1); + s.push('e'); + s + }; + Some(format!("\"{res}\"")) + } + ScType::Udt(ScSpecTypeUdt { name }) => { + self.example_udts(name.to_utf8_string_lossy().as_ref()) + } + // No specific value name for these yet. + ScType::Val => None, + } + } + + fn example_udts(&self, name: &str) -> Option { + match self.find(name).ok() { + Some(ScSpecEntry::UdtStructV0(strukt)) => { + // Check if a tuple strukt + if !strukt.fields.is_empty() && strukt.fields[0].name.to_utf8_string_lossy() == "0" + { + let value_types = strukt + .fields + .iter() + .map(|f| f.type_.clone()) + .collect::>() + .try_into() + .ok()?; + return self.example(&ScType::Tuple(Box::new(ScSpecTypeTuple { value_types }))); + } + let inner = strukt + .fields + .iter() + .map(|f| (f.name.to_utf8_string_lossy(), &f.type_)) + .map(|(name, type_)| { + let type_ = self.example(type_)?; + let name = format!(r#""{name}""#); + Some(format!("{name}: {type_}")) + }) + .collect::>>()? + .join(", "); + Some(format!(r#"{{ {inner} }}"#)) + } + Some(ScSpecEntry::UdtUnionV0(union)) => self.example_union(union), + Some(ScSpecEntry::UdtEnumV0(enum_)) => { + enum_.cases.iter().next().map(|c| c.value.to_string()) + } + Some(ScSpecEntry::FunctionV0(_) | ScSpecEntry::UdtErrorEnumV0(_)) | None => None, + } + } + + fn example_union(&self, union: &ScSpecUdtUnionV0) -> Option { + let res = union + .cases + .iter() + .map(|case| match case { + ScSpecUdtUnionCaseV0::VoidV0(ScSpecUdtUnionCaseVoidV0 { name, .. }) => { + Some(format!("\"{}\"", name.to_utf8_string_lossy())) + } + ScSpecUdtUnionCaseV0::TupleV0(ScSpecUdtUnionCaseTupleV0 { + name, type_, .. + }) => { + if type_.len() == 1 { + let single = self.example(&type_[0])?; + Some(format!("{{\"{}\":{single}}}", name.to_utf8_string_lossy())) + } else { + let names = type_ + .iter() + .map(|t| self.example(t)) + .collect::>>()? + .join(", "); + Some(format!("{{\"{}\":[{names}]}}", name.to_utf8_string_lossy())) + } + } + }) + .collect::>>()?; + Some(res.join("|")) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use stellar_xdr::curr::ScSpecTypeBytesN; + + #[test] + fn from_json_primitives_bytesn() { + // TODO: Add test for parsing addresses + + // Check it parses hex-encoded bytes + let b = from_json_primitives( + &Value::String("beefface".to_string()), + &ScType::BytesN(ScSpecTypeBytesN { n: 4 }), + ) + .unwrap(); + assert_eq!( + b, + ScVal::Bytes(ScBytes(vec![0xbe, 0xef, 0xfa, 0xce].try_into().unwrap())) + ); + + // Check it parses hex-encoded bytes when they are all numbers. Normally the json would + // interpret the CLI arg as a number, so we need a special case there. 
+ let b = from_json_primitives( + &Value::Number(4554.into()), + &ScType::BytesN(ScSpecTypeBytesN { n: 2 }), + ) + .unwrap(); + assert_eq!( + b, + ScVal::Bytes(ScBytes(vec![0x45, 0x54].try_into().unwrap())) + ); + } + + #[test] + fn from_json_primitives_bytes() { + // Check it parses hex-encoded bytes + let b = + from_json_primitives(&Value::String("beefface".to_string()), &ScType::Bytes).unwrap(); + assert_eq!( + b, + ScVal::Bytes(ScBytes(vec![0xbe, 0xef, 0xfa, 0xce].try_into().unwrap())) + ); + + // Check it parses hex-encoded bytes when they are all numbers. Normally the json would + // interpret the CLI arg as a number, so we need a special case there. + let b = from_json_primitives(&Value::Number(4554.into()), &ScType::Bytes).unwrap(); + assert_eq!( + b, + ScVal::Bytes(ScBytes(vec![0x45, 0x54].try_into().unwrap())) + ); + } + + #[test] + fn test_sc_address_from_json_strkey() { + // All zero contract address + match sc_address_from_json("CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABSC4") { + Ok(addr) => assert_eq!(addr, ScVal::Address(ScAddress::Contract(Hash([0; 32])))), + Err(e) => panic!("Unexpected error: {e}"), + } + + // Real contract address + match sc_address_from_json("CA3D5KRYM6CB7OWQ6TWYRR3Z4T7GNZLKERYNZGGA5SOAOPIFY6YQGAXE") { + Ok(addr) => assert_eq!( + addr, + ScVal::Address(ScAddress::Contract( + [ + 0x36, 0x3e, 0xaa, 0x38, 0x67, 0x84, 0x1f, 0xba, 0xd0, 0xf4, 0xed, 0x88, + 0xc7, 0x79, 0xe4, 0xfe, 0x66, 0xe5, 0x6a, 0x24, 0x70, 0xdc, 0x98, 0xc0, + 0xec, 0x9c, 0x07, 0x3d, 0x05, 0xc7, 0xb1, 0x03, + ] + .try_into() + .unwrap() + )) + ), + Err(e) => panic!("Unexpected error: {e}"), + } + + // All zero user account address + match sc_address_from_json("GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF") { + Ok(addr) => assert_eq!( + addr, + ScVal::Address(ScAddress::Account(AccountId( + PublicKey::PublicKeyTypeEd25519([0; 32].try_into().unwrap()) + ))) + ), + Err(e) => panic!("Unexpected error: {e}"), + } + + // Real user account address + match sc_address_from_json("GA3D5KRYM6CB7OWQ6TWYRR3Z4T7GNZLKERYNZGGA5SOAOPIFY6YQHES5") { + Ok(addr) => assert_eq!( + addr, + ScVal::Address(ScAddress::Account(AccountId( + PublicKey::PublicKeyTypeEd25519( + [ + 0x36, 0x3e, 0xaa, 0x38, 0x67, 0x84, 0x1f, 0xba, 0xd0, 0xf4, 0xed, 0x88, + 0xc7, 0x79, 0xe4, 0xfe, 0x66, 0xe5, 0x6a, 0x24, 0x70, 0xdc, 0x98, 0xc0, + 0xec, 0x9c, 0x07, 0x3d, 0x05, 0xc7, 0xb1, 0x03, + ] + .try_into() + .unwrap() + ) + ))) + ), + Err(e) => panic!("Unexpected error: {e}"), + } + } +} diff --git a/cmd/crates/soroban-spec-tools/src/utils.rs b/cmd/crates/soroban-spec-tools/src/utils.rs new file mode 100644 index 00000000..66b153a1 --- /dev/null +++ b/cmd/crates/soroban-spec-tools/src/utils.rs @@ -0,0 +1,294 @@ +use base64::{engine::general_purpose::STANDARD as base64, Engine as _}; +use hex::FromHexError; +use std::{ + fmt::Display, + io::{self, Cursor}, +}; + +use stellar_xdr::curr::{ + Limited, Limits, ReadXdr, ScEnvMetaEntry, ScMetaEntry, ScMetaV0, ScSpecEntry, ScSpecFunctionV0, + ScSpecUdtEnumV0, ScSpecUdtErrorEnumV0, ScSpecUdtStructV0, ScSpecUdtUnionV0, StringM, +}; + +pub struct ContractSpec { + pub env_meta_base64: Option, + pub env_meta: Vec, + pub meta_base64: Option, + pub meta: Vec, + pub spec_base64: Option, + pub spec: Vec, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("reading file {filepath}: {error}")] + CannotReadContractFile { + filepath: std::path::PathBuf, + error: io::Error, + }, + #[error("cannot parse wasm file {file}: {error}")] + CannotParseWasm { + file: 
std::path::PathBuf, + error: wasmparser::BinaryReaderError, + }, + #[error("xdr processing error: {0}")] + Xdr(#[from] stellar_xdr::curr::Error), + + #[error(transparent)] + Parser(#[from] wasmparser::BinaryReaderError), +} + +impl ContractSpec { + pub fn new(bytes: &[u8]) -> Result { + let mut env_meta: Option<&[u8]> = None; + let mut meta: Option<&[u8]> = None; + let mut spec: Option<&[u8]> = None; + for payload in wasmparser::Parser::new(0).parse_all(bytes) { + let payload = payload?; + if let wasmparser::Payload::CustomSection(section) = payload { + let out = match section.name() { + "contractenvmetav0" => &mut env_meta, + "contractmetav0" => &mut meta, + "contractspecv0" => &mut spec, + _ => continue, + }; + *out = Some(section.data()); + }; + } + + let mut env_meta_base64 = None; + let env_meta = if let Some(env_meta) = env_meta { + env_meta_base64 = Some(base64.encode(env_meta)); + let cursor = Cursor::new(env_meta); + let mut read = Limited::new(cursor, Limits::none()); + ScEnvMetaEntry::read_xdr_iter(&mut read).collect::, _>>()? + } else { + vec![] + }; + + let mut meta_base64 = None; + let meta = if let Some(meta) = meta { + meta_base64 = Some(base64.encode(meta)); + let cursor = Cursor::new(meta); + let mut read = Limited::new(cursor, Limits::none()); + ScMetaEntry::read_xdr_iter(&mut read).collect::, _>>()? + } else { + vec![] + }; + + let mut spec_base64 = None; + let spec = if let Some(spec) = spec { + spec_base64 = Some(base64.encode(spec)); + let cursor = Cursor::new(spec); + let mut read = Limited::new(cursor, Limits::none()); + ScSpecEntry::read_xdr_iter(&mut read).collect::, _>>()? + } else { + vec![] + }; + + Ok(ContractSpec { + env_meta_base64, + env_meta, + meta_base64, + meta, + spec_base64, + spec, + }) + } +} + +impl Display for ContractSpec { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Some(env_meta) = &self.env_meta_base64 { + writeln!(f, "Env Meta: {env_meta}")?; + for env_meta_entry in &self.env_meta { + match env_meta_entry { + ScEnvMetaEntry::ScEnvMetaKindInterfaceVersion(v) => { + writeln!(f, " • Interface Version: {v}")?; + } + } + } + writeln!(f)?; + } else { + writeln!(f, "Env Meta: None\n")?; + } + + if let Some(_meta) = &self.meta_base64 { + writeln!(f, "Contract Meta:")?; + for meta_entry in &self.meta { + match meta_entry { + ScMetaEntry::ScMetaV0(ScMetaV0 { key, val }) => { + writeln!(f, " • {key}: {val}")?; + } + } + } + writeln!(f)?; + } else { + writeln!(f, "Contract Meta: None\n")?; + } + + if let Some(_spec_base64) = &self.spec_base64 { + writeln!(f, "Contract Spec:")?; + for spec_entry in &self.spec { + match spec_entry { + ScSpecEntry::FunctionV0(func) => write_func(f, func)?, + ScSpecEntry::UdtUnionV0(udt) => write_union(f, udt)?, + ScSpecEntry::UdtStructV0(udt) => write_struct(f, udt)?, + ScSpecEntry::UdtEnumV0(udt) => write_enum(f, udt)?, + ScSpecEntry::UdtErrorEnumV0(udt) => write_error(f, udt)?, + } + } + } else { + writeln!(f, "Contract Spec: None")?; + } + Ok(()) + } +} + +fn write_func(f: &mut std::fmt::Formatter<'_>, func: &ScSpecFunctionV0) -> std::fmt::Result { + writeln!(f, " • Function: {}", func.name.to_utf8_string_lossy())?; + if func.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + &indent(&func.doc.to_utf8_string_lossy(), 11).trim() + )?; + } + writeln!( + f, + " Inputs: {}", + indent(&format!("{:#?}", func.inputs), 5).trim() + )?; + writeln!( + f, + " Output: {}", + indent(&format!("{:#?}", func.outputs), 5).trim() + )?; + writeln!(f)?; + Ok(()) +} + +fn write_union(f: &mut 
std::fmt::Formatter<'_>, udt: &ScSpecUdtUnionV0) -> std::fmt::Result { + writeln!(f, " • Union: {}", format_name(&udt.lib, &udt.name))?; + if udt.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + indent(&udt.doc.to_utf8_string_lossy(), 10).trim() + )?; + } + writeln!(f, " Cases:")?; + for case in udt.cases.iter() { + writeln!(f, " • {}", indent(&format!("{case:#?}"), 8).trim())?; + } + writeln!(f)?; + Ok(()) +} + +fn write_struct(f: &mut std::fmt::Formatter<'_>, udt: &ScSpecUdtStructV0) -> std::fmt::Result { + writeln!(f, " • Struct: {}", format_name(&udt.lib, &udt.name))?; + if udt.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + indent(&udt.doc.to_utf8_string_lossy(), 10).trim() + )?; + } + writeln!(f, " Fields:")?; + for field in udt.fields.iter() { + writeln!( + f, + " • {}: {}", + field.name.to_utf8_string_lossy(), + indent(&format!("{:#?}", field.type_), 8).trim() + )?; + if field.doc.len() > 0 { + writeln!(f, "{}", indent(&format!("{:#?}", field.doc), 8))?; + } + } + writeln!(f)?; + Ok(()) +} + +fn write_enum(f: &mut std::fmt::Formatter<'_>, udt: &ScSpecUdtEnumV0) -> std::fmt::Result { + writeln!(f, " • Enum: {}", format_name(&udt.lib, &udt.name))?; + if udt.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + indent(&udt.doc.to_utf8_string_lossy(), 10).trim() + )?; + } + writeln!(f, " Cases:")?; + for case in udt.cases.iter() { + writeln!(f, " • {}", indent(&format!("{case:#?}"), 8).trim())?; + } + writeln!(f)?; + Ok(()) +} + +fn write_error(f: &mut std::fmt::Formatter<'_>, udt: &ScSpecUdtErrorEnumV0) -> std::fmt::Result { + writeln!(f, " • Error: {}", format_name(&udt.lib, &udt.name))?; + if udt.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + indent(&udt.doc.to_utf8_string_lossy(), 10).trim() + )?; + } + writeln!(f, " Cases:")?; + for case in udt.cases.iter() { + writeln!(f, " • {}", indent(&format!("{case:#?}"), 8).trim())?; + } + writeln!(f)?; + Ok(()) +} + +fn indent(s: &str, n: usize) -> String { + let pad = " ".repeat(n); + s.lines() + .map(|line| format!("{pad}{line}")) + .collect::>() + .join("\n") +} + +fn format_name(lib: &StringM<80>, name: &StringM<60>) -> String { + if lib.len() > 0 { + format!( + "{}::{}", + lib.to_utf8_string_lossy(), + name.to_utf8_string_lossy() + ) + } else { + name.to_utf8_string_lossy() + } +} + +/// # Errors +/// +/// Might return an error +pub fn padded_hex_from_str(s: &str, n: usize) -> Result, FromHexError> { + if s.len() > n * 2 { + return Err(FromHexError::InvalidStringLength); + } + let mut decoded = vec![0u8; n]; + let padded = format!("{s:0>width$}", width = n * 2); + hex::decode_to_slice(padded, &mut decoded)?; + Ok(decoded) +} + +/// # Errors +/// +/// Might return an error +pub fn contract_id_from_str(contract_id: &str) -> Result<[u8; 32], stellar_strkey::DecodeError> { + stellar_strkey::Contract::from_string(contract_id) + .map(|strkey| strkey.0) + .or_else(|_| { + // strkey failed, try to parse it as a hex string, for backwards compatibility. + padded_hex_from_str(contract_id, 32) + .map_err(|_| stellar_strkey::DecodeError::Invalid)? + .try_into() + .map_err(|_| stellar_strkey::DecodeError::Invalid) + }) + .map_err(|_| stellar_strkey::DecodeError::Invalid) +} diff --git a/cmd/crates/soroban-spec-typescript/Cargo.toml b/cmd/crates/soroban-spec-typescript/Cargo.toml new file mode 100644 index 00000000..cc17073f --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "soroban-spec-typescript" +description = "Soroban contract spec utilities for generating JSON." 
+homepage = "https://github.com/stellar/soroban-tools" +repository = "https://github.com/stellar/soroban-tools" +authors = ["Stellar Development Foundation "] +readme = "README.md" +license = "Apache-2.0" +version.workspace = true +edition = "2021" +rust-version.workspace = true + +[dependencies] +soroban-spec = { workspace = true } +thiserror = "1.0.32" +serde = "1.0.82" +serde_derive = "1.0.82" +serde_json = "1.0.82" +sha2 = "0.9.9" +prettyplease = "0.2.4" +include_dir = { version = "0.7.3", features = ["glob"] } +heck = "0.4.1" +itertools = { workspace = true } +base64 = { workspace = true } + +[dependencies.stellar-xdr] +workspace = true +features = ["curr", "std", "serde", "base64"] + +[dev_dependencies] +temp-dir = "0.1.11" +pretty_assertions = "1.2.1" +walkdir = "2.3.3" diff --git a/cmd/crates/soroban-spec-typescript/README.md b/cmd/crates/soroban-spec-typescript/README.md new file mode 100644 index 00000000..4cc3f522 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/README.md @@ -0,0 +1,4 @@ +# soroban-spec-json + +Generation of TypeScript client bindings from Soroban contract specification / +interface. diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/.gitignore b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/.gitignore new file mode 100644 index 00000000..72aae85f --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +out/ diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/README.md b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/README.md new file mode 100644 index 00000000..03f87f30 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/README.md @@ -0,0 +1,54 @@ +# test_custom_types JS + +JS library for interacting with [Soroban](https://soroban.stellar.org/) smart contract `test_custom_types` via Soroban RPC. + +This library was automatically generated by Soroban CLI using a command similar to: + +```bash +soroban contract bindings ts \ + --rpc-url https://rpc-futurenet.stellar.org:443 \ + --network-passphrase "Test SDF Future Network ; October 2022" \ + --contract-id CBYMYMSDF6FBDNCFJCRC7KMO4REYFPOH2U4N7FXI3GJO6YXNCQ43CDSK \ + --output-dir ./path/to/test_custom_types +``` + +The network passphrase and contract ID are exported from [index.ts](./src/index.ts) in the `networks` constant. If you are the one who generated this library and you know that this contract is also deployed to other networks, feel free to update `networks` with other valid options. This will help your contract consumers use this library more easily. + +# To publish or not to publish + +This library is suitable for publishing to NPM. You can publish it to NPM using the `npm publish` command. + +But you don't need to publish this library to NPM to use it. You can add it to your project's `package.json` using a file path: + +```json +"dependencies": { + "test_custom_types": "./path/to/this/folder" +} +``` + +However, we've actually encountered [frustration](https://github.com/stellar/soroban-example-dapp/pull/117#discussion_r1232873560) using local libraries with NPM in this way. Though it seems a bit messy, we suggest generating the library directly to your `node_modules` folder automatically after each install by using a `postinstall` script. We've had the least trouble with this approach. 
NPM will automatically remove what it sees as erroneous directories during the `install` step, and then regenerate them when it gets to your `postinstall` step, which will keep the library up-to-date with your contract. + +```json +"scripts": { + "postinstall": "soroban contract bindings ts --rpc-url https://rpc-futurenet.stellar.org:443 --network-passphrase \"Test SDF Future Network ; October 2022\" --id CBYMYMSDF6FBDNCFJCRC7KMO4REYFPOH2U4N7FXI3GJO6YXNCQ43CDSK --name test_custom_types" +} +``` + +Obviously you need to adjust the above command based on the actual command you used to generate the library. + +# Use it + +Now that you have your library up-to-date and added to your project, you can import it in a file and see inline documentation for all of its exported methods: + +```js +import { Contract, networks } from "test_custom_types" + +const contract = new Contract({ + ...networks.futurenet, // for example; check which networks this library exports + rpcUrl: '...', // use your own, or find one for testing at https://soroban.stellar.org/docs/reference/rpc#public-rpc-providers +}) + +contract.| +``` + +As long as your editor is configured to show JavaScript/TypeScript documentation, you can pause your typing at that `|` to get a list of all exports and inline-documentation for each. It exports a separate [async](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) function for each method in the smart contract, with documentation for each generated from the comments the contract's author included in the original source code. diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/assembled-tx.d.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/assembled-tx.d.ts new file mode 100644 index 00000000..1d5e6f5e --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/assembled-tx.d.ts @@ -0,0 +1,184 @@ +import { Account, Address, Operation, SorobanRpc, xdr } from "@stellar/stellar-sdk"; +import type { Memo, MemoType, Transaction } from "@stellar/stellar-sdk"; +import type { ClassOptions, MethodOptions, Wallet, XDR_BASE64 } from "./method-options.js"; +export type Tx = Transaction, Operation[]>; +export declare class ExpiredStateError extends Error { +} +export declare class NeedsMoreSignaturesError extends Error { +} +export declare class WalletDisconnectedError extends Error { +} +export declare class SendResultOnlyError extends Error { +} +export declare class SendFailedError extends Error { +} +export declare class NoUnsignedNonInvokerAuthEntriesError extends Error { +} +type SendTx = SorobanRpc.Api.SendTransactionResponse; +type GetTx = SorobanRpc.Api.GetTransactionResponse; +export type u32 = number; +export type i32 = number; +export type u64 = bigint; +export type i64 = bigint; +export type u128 = bigint; +export type i128 = bigint; +export type u256 = bigint; +export type i256 = bigint; +export type Option = T | undefined; +export type Typepoint = bigint; +export type Duration = bigint; +export { Address }; +export interface Error_ { + message: string; +} +export interface Result { + unwrap(): T; + unwrapErr(): E; + isOk(): boolean; + isErr(): boolean; +} +export declare class Ok implements Result { + readonly value: T; + constructor(value: T); + unwrapErr(): E; + unwrap(): T; + isOk(): boolean; + isErr(): boolean; +} +export declare class Err implements Result { + readonly error: E; + constructor(error: E); + unwrapErr(): E; + unwrap(): never; + isOk(): 
boolean; + isErr(): boolean; +} +export declare const contractErrorPattern: RegExp; +type AssembledTransactionOptions = MethodOptions & ClassOptions & { + method: string; + args?: any[]; + parseResultXdr: (xdr: string | xdr.ScVal | Err) => T; +}; +export declare const NULL_ACCOUNT = "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF"; +export declare class AssembledTransaction { + options: AssembledTransactionOptions; + raw: Tx; + private simulation?; + private simulationResult?; + private simulationTransactionData?; + private server; + toJSON(): string; + static fromJSON(options: Omit, 'args'>, { tx, simulationResult, simulationTransactionData }: { + tx: XDR_BASE64; + simulationResult: { + auth: XDR_BASE64[]; + retval: XDR_BASE64; + }; + simulationTransactionData: XDR_BASE64; + }): AssembledTransaction; + private constructor(); + static fromSimulation(options: AssembledTransactionOptions): Promise>; + simulate: () => Promise; + get simulationData(): { + result: SorobanRpc.Api.SimulateHostFunctionResult; + transactionData: xdr.SorobanTransactionData; + }; + get result(): T; + parseError(errorMessage: string): Err | undefined; + getWallet: () => Promise; + getPublicKey: () => Promise; + /** + * Get account details from the Soroban network for the publicKey currently + * selected in user's wallet. If not connected to Freighter, use placeholder + * null account. + */ + getAccount: () => Promise; + /** + * Sign the transaction with the `wallet` (default Freighter), then send to + * the network and return a `SentTransaction` that keeps track of all the + * attempts to send and fetch the transaction from the network. + */ + signAndSend: ({ secondsToWait, force }?: { + /** + * Wait `secondsToWait` seconds (default: 10) for both the transaction to SEND successfully (will keep trying if the server returns `TRY_AGAIN_LATER`), as well as for the transaction to COMPLETE (will keep checking if the server returns `PENDING`). + */ + secondsToWait?: number | undefined; + /** + * If `true`, sign and send the transaction even if it is a read call. + */ + force?: boolean | undefined; + }) => Promise>; + getStorageExpiration: () => Promise; + /** + * Get a list of accounts, other than the invoker of the simulation, that + * need to sign auth entries in this transaction. + * + * Soroban allows multiple people to sign a transaction. Someone needs to + * sign the final transaction envelope; this person/account is called the + * _invoker_, or _source_. Other accounts might need to sign individual auth + * entries in the transaction, if they're not also the invoker. + * + * This function returns a list of accounts that need to sign auth entries, + * assuming that the same invoker/source account will sign the final + * transaction envelope as signed the initial simulation. + * + * One at a time, for each public key in this array, you will need to + * serialize this transaction with `toJSON`, send to the owner of that key, + * deserialize the transaction with `txFromJson`, and call + * {@link signAuthEntries}. Then re-serialize and send to the next account + * in this list. + */ + needsNonInvokerSigningBy: ({ includeAlreadySigned, }?: { + /** + * Whether or not to include auth entries that have already been signed. 
Default: false + */ + includeAlreadySigned?: boolean | undefined; + }) => Promise; + preImageFor(entry: xdr.SorobanAuthorizationEntry, signatureExpirationLedger: number): xdr.HashIdPreimage; + /** + * If {@link needsNonInvokerSigningBy} returns a non-empty list, you can serialize + * the transaction with `toJSON`, send it to the owner of one of the public keys + * in the map, deserialize with `txFromJSON`, and call this method on their + * machine. Internally, this will use `signAuthEntry` function from connected + * `wallet` for each. + * + * Then, re-serialize the transaction and either send to the next + * `needsNonInvokerSigningBy` owner, or send it back to the original account + * who simulated the transaction so they can {@link sign} the transaction + * envelope and {@link send} it to the network. + * + * Sending to all `needsNonInvokerSigningBy` owners in parallel is not currently + * supported! + */ + signAuthEntries: (expiration?: number | Promise) => Promise; + get isReadCall(): boolean; + hasRealInvoker: () => Promise; +} +/** + * A transaction that has been sent to the Soroban network. This happens in two steps: + * + * 1. `sendTransaction`: initial submission of the transaction to the network. + * This step can run into problems, and will be retried with exponential + * backoff if it does. See all attempts in `sendTransactionResponseAll` and the + * most recent attempt in `sendTransactionResponse`. + * 2. `getTransaction`: once the transaction has been submitted to the network + * successfully, you need to wait for it to finalize to get the results of the + * transaction. This step can also run into problems, and will be retried with + * exponential backoff if it does. See all attempts in + * `getTransactionResponseAll` and the most recent attempt in + * `getTransactionResponse`. 
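To tie the declarations above together, here is a hedged usage sketch. The method name `someMethod` and its argument object are placeholders for whatever methods the generated `Contract` class exposes; everything else uses only members shown in these generated files, and the flow is a sketch rather than the library's prescribed API.

```ts
import { Contract, networks } from "test_custom_types";

async function example() {
  const contract = new Contract({
    ...networks.futurenet,
    rpcUrl: "https://rpc-futurenet.stellar.org:443", // any Soroban RPC endpoint
  });

  // Each generated method simulates the invocation and resolves to an
  // AssembledTransaction (declared above).
  const tx = await contract.someMethod({ /* method arguments */ });

  if (tx.isReadCall) {
    // Read calls need no signature; the simulation already holds the result.
    console.log(tx.result);
  } else if ((await tx.needsNonInvokerSigningBy()).length > 0) {
    // Multi-party case: serialize with toJSON, hand the string to the other
    // signer, who restores it (e.g. via the Contract's fromJSON helpers),
    // calls signAuthEntries(), and returns it before the final signAndSend().
    const json = tx.toJSON();
    // ...exchange `json` out of band...
  } else {
    // Sign with the configured wallet (Freighter by default) and submit;
    // the resulting SentTransaction (declared below) exposes the final result.
    const sent = await tx.signAndSend();
    console.log(sent.result);
  }
}
```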
+ */ +declare class SentTransaction { + options: AssembledTransactionOptions; + assembled: AssembledTransaction; + server: SorobanRpc.Server; + signed: Tx; + sendTransactionResponse?: SendTx; + sendTransactionResponseAll?: SendTx[]; + getTransactionResponse?: GetTx; + getTransactionResponseAll?: GetTx[]; + constructor(options: AssembledTransactionOptions, assembled: AssembledTransaction); + static init: (options: AssembledTransactionOptions, assembled: AssembledTransaction, secondsToWait?: number) => Promise>; + private send; + get result(): T; +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/assembled-tx.js b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/assembled-tx.js new file mode 100644 index 00000000..c60a6e5f --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/assembled-tx.js @@ -0,0 +1,462 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AssembledTransaction = exports.NULL_ACCOUNT = exports.contractErrorPattern = exports.Err = exports.Ok = exports.Address = exports.NoUnsignedNonInvokerAuthEntriesError = exports.SendFailedError = exports.SendResultOnlyError = exports.WalletDisconnectedError = exports.NeedsMoreSignaturesError = exports.ExpiredStateError = void 0; +const stellar_sdk_1 = require("@stellar/stellar-sdk"); +Object.defineProperty(exports, "Address", { enumerable: true, get: function () { return stellar_sdk_1.Address; } }); +const buffer_1 = require("buffer"); +class ExpiredStateError extends Error { +} +exports.ExpiredStateError = ExpiredStateError; +class NeedsMoreSignaturesError extends Error { +} +exports.NeedsMoreSignaturesError = NeedsMoreSignaturesError; +class WalletDisconnectedError extends Error { +} +exports.WalletDisconnectedError = WalletDisconnectedError; +class SendResultOnlyError extends Error { +} +exports.SendResultOnlyError = SendResultOnlyError; +class SendFailedError extends Error { +} +exports.SendFailedError = SendFailedError; +class NoUnsignedNonInvokerAuthEntriesError extends Error { +} +exports.NoUnsignedNonInvokerAuthEntriesError = NoUnsignedNonInvokerAuthEntriesError; +; +; +class Ok { + value; + constructor(value) { + this.value = value; + } + unwrapErr() { + throw new Error('No error'); + } + unwrap() { + return this.value; + } + isOk() { + return true; + } + isErr() { + return !this.isOk(); + } +} +exports.Ok = Ok; +class Err { + error; + constructor(error) { + this.error = error; + } + unwrapErr() { + return this.error; + } + unwrap() { + throw new Error(this.error.message); + } + isOk() { + return false; + } + isErr() { + return !this.isOk(); + } +} +exports.Err = Err; +exports.contractErrorPattern = /Error\(Contract, #(\d+)\)/; +exports.NULL_ACCOUNT = "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF"; +class AssembledTransaction { + options; + raw; + simulation; + simulationResult; + simulationTransactionData; + server; + toJSON() { + return JSON.stringify({ + method: this.options.method, + tx: this.raw?.toXDR(), + simulationResult: { + auth: this.simulationData.result.auth.map(a => a.toXDR('base64')), + retval: this.simulationData.result.retval.toXDR('base64'), + }, + simulationTransactionData: this.simulationData.transactionData.toXDR('base64'), + }); + } + static fromJSON(options, { tx, simulationResult, simulationTransactionData }) { + const txn = new AssembledTransaction(options); + txn.raw = stellar_sdk_1.TransactionBuilder.fromXDR(tx, options.networkPassphrase); + 
txn.simulationResult = { + auth: simulationResult.auth.map(a => stellar_sdk_1.xdr.SorobanAuthorizationEntry.fromXDR(a, 'base64')), + retval: stellar_sdk_1.xdr.ScVal.fromXDR(simulationResult.retval, 'base64'), + }; + txn.simulationTransactionData = stellar_sdk_1.xdr.SorobanTransactionData.fromXDR(simulationTransactionData, 'base64'); + return txn; + } + constructor(options) { + this.options = options; + this.server = new stellar_sdk_1.SorobanRpc.Server(this.options.rpcUrl, { + allowHttp: this.options.rpcUrl.startsWith("http://"), + }); + } + static async fromSimulation(options) { + const tx = new AssembledTransaction(options); + const contract = new stellar_sdk_1.Contract(options.contractId); + tx.raw = new stellar_sdk_1.TransactionBuilder(await tx.getAccount(), { + fee: options.fee?.toString(10) ?? stellar_sdk_1.BASE_FEE, + networkPassphrase: options.networkPassphrase, + }) + .addOperation(contract.call(options.method, ...(options.args ?? []))) + .setTimeout(stellar_sdk_1.TimeoutInfinite) + .build(); + return await tx.simulate(); + } + simulate = async () => { + if (!this.raw) + throw new Error('Transaction has not yet been assembled'); + this.simulation = await this.server.simulateTransaction(this.raw); + if (stellar_sdk_1.SorobanRpc.Api.isSimulationSuccess(this.simulation)) { + this.raw = stellar_sdk_1.SorobanRpc.assembleTransaction(this.raw, this.simulation).build(); + } + return this; + }; + get simulationData() { + if (this.simulationResult && this.simulationTransactionData) { + return { + result: this.simulationResult, + transactionData: this.simulationTransactionData, + }; + } + // else, we know we just did the simulation on this machine + const simulation = this.simulation; + if (stellar_sdk_1.SorobanRpc.Api.isSimulationError(simulation)) { + throw new Error(`Transaction simulation failed: "${simulation.error}"`); + } + if (stellar_sdk_1.SorobanRpc.Api.isSimulationRestore(simulation)) { + throw new ExpiredStateError(`You need to restore some contract state before you can invoke this method. ${JSON.stringify(simulation, null, 2)}`); + } + if (!simulation.result) { + throw new Error(`Expected an invocation simulation, but got no 'result' field. Simulation: ${JSON.stringify(simulation, null, 2)}`); + } + // add to object for serialization & deserialization + this.simulationResult = simulation.result; + this.simulationTransactionData = simulation.transactionData.build(); + return { + result: this.simulationResult, + transactionData: this.simulationTransactionData, + }; + } + get result() { + try { + return this.options.parseResultXdr(this.simulationData.result.retval); + } + catch (e) { + let err = this.parseError(e.toString()); + if (err) + return err; + throw e; + } + } + parseError(errorMessage) { + if (!this.options.errorTypes) + return; + const match = errorMessage.match(exports.contractErrorPattern); + if (!match) + return; + let i = parseInt(match[1], 10); + let err = this.options.errorTypes[i]; + if (err) + return new Err(err); + } + getWallet = async () => { + return this.options.wallet ?? (await Promise.resolve().then(() => require("@stellar/freighter-api"))).default; + }; + getPublicKey = async () => { + const wallet = await this.getWallet(); + if (await wallet.isConnected() && await wallet.isAllowed()) { + return (await wallet.getUserInfo()).publicKey; + } + }; + /** + * Get account details from the Soroban network for the publicKey currently + * selected in user's wallet. If not connected to Freighter, use placeholder + * null account. 
+ */ + getAccount = async () => { + const publicKey = await this.getPublicKey(); + return publicKey + ? await this.server.getAccount(publicKey) + : new stellar_sdk_1.Account(exports.NULL_ACCOUNT, "0"); + }; + /** + * Sign the transaction with the `wallet` (default Freighter), then send to + * the network and return a `SentTransaction` that keeps track of all the + * attempts to send and fetch the transaction from the network. + */ + signAndSend = async ({ secondsToWait = 10, force = false } = {}) => { + if (!this.raw) { + throw new Error('Transaction has not yet been simulated'); + } + if (!force && this.isReadCall) { + throw new Error('This is a read call. It requires no signature or sending. Use `force: true` to sign and send anyway.'); + } + if (!await this.hasRealInvoker()) { + throw new WalletDisconnectedError('Wallet is not connected'); + } + if (this.raw.source !== (await this.getAccount()).accountId()) { + throw new Error(`You must submit the transaction with the account that originally created it. Please switch to the wallet with "${this.raw.source}" as its public key.`); + } + if ((await this.needsNonInvokerSigningBy()).length) { + throw new NeedsMoreSignaturesError('Transaction requires more signatures. See `needsNonInvokerSigningBy` for details.'); + } + return await SentTransaction.init(this.options, this, secondsToWait); + }; + getStorageExpiration = async () => { + const entryRes = await this.server.getLedgerEntries(new stellar_sdk_1.Contract(this.options.contractId).getFootprint()); + if (!entryRes.entries || + !entryRes.entries.length || + !entryRes.entries[0].liveUntilLedgerSeq) + throw new Error('failed to get ledger entry'); + return entryRes.entries[0].liveUntilLedgerSeq; + }; + /** + * Get a list of accounts, other than the invoker of the simulation, that + * need to sign auth entries in this transaction. + * + * Soroban allows multiple people to sign a transaction. Someone needs to + * sign the final transaction envelope; this person/account is called the + * _invoker_, or _source_. Other accounts might need to sign individual auth + * entries in the transaction, if they're not also the invoker. + * + * This function returns a list of accounts that need to sign auth entries, + * assuming that the same invoker/source account will sign the final + * transaction envelope as signed the initial simulation. + * + * One at a time, for each public key in this array, you will need to + * serialize this transaction with `toJSON`, send to the owner of that key, + * deserialize the transaction with `txFromJson`, and call + * {@link signAuthEntries}. Then re-serialize and send to the next account + * in this list. + */ + needsNonInvokerSigningBy = async ({ includeAlreadySigned = false, } = {}) => { + if (!this.raw) { + throw new Error('Transaction has not yet been simulated'); + } + // We expect that any transaction constructed by these libraries has a + // single operation, which is an InvokeHostFunction operation. The host + // function being invoked is the contract method call. + if (!("operations" in this.raw)) { + throw new Error(`Unexpected Transaction type; no operations: ${JSON.stringify(this.raw)}`); + } + const rawInvokeHostFunctionOp = this.raw + .operations[0]; + return [...new Set((rawInvokeHostFunctionOp.auth ?? 
[]).filter(entry => entry.credentials().switch() === + stellar_sdk_1.xdr.SorobanCredentialsType.sorobanCredentialsAddress() && + (includeAlreadySigned || + entry.credentials().address().signature().switch().name === 'scvVoid')).map(entry => stellar_sdk_1.StrKey.encodeEd25519PublicKey(entry.credentials().address().address().accountId().ed25519())))]; + }; + preImageFor(entry, signatureExpirationLedger) { + const addrAuth = entry.credentials().address(); + return stellar_sdk_1.xdr.HashIdPreimage.envelopeTypeSorobanAuthorization(new stellar_sdk_1.xdr.HashIdPreimageSorobanAuthorization({ + networkId: (0, stellar_sdk_1.hash)(buffer_1.Buffer.from(this.options.networkPassphrase)), + nonce: addrAuth.nonce(), + invocation: entry.rootInvocation(), + signatureExpirationLedger, + })); + } + /** + * If {@link needsNonInvokerSigningBy} returns a non-empty list, you can serialize + * the transaction with `toJSON`, send it to the owner of one of the public keys + * in the map, deserialize with `txFromJSON`, and call this method on their + * machine. Internally, this will use `signAuthEntry` function from connected + * `wallet` for each. + * + * Then, re-serialize the transaction and either send to the next + * `needsNonInvokerSigningBy` owner, or send it back to the original account + * who simulated the transaction so they can {@link sign} the transaction + * envelope and {@link send} it to the network. + * + * Sending to all `needsNonInvokerSigningBy` owners in parallel is not currently + * supported! + */ + signAuthEntries = async ( + /** + * When to set each auth entry to expire. Could be any number of blocks in + * the future. Can be supplied as a promise or a raw number. Default: + * contract's current `persistent` storage expiration date/ledger + * number/block. + */ + expiration = this.getStorageExpiration()) => { + if (!this.raw) + throw new Error('Transaction has not yet been assembled or simulated'); + const needsNonInvokerSigningBy = await this.needsNonInvokerSigningBy(); + if (!needsNonInvokerSigningBy) + throw new NoUnsignedNonInvokerAuthEntriesError('No unsigned non-invoker auth entries; maybe you already signed?'); + const publicKey = await this.getPublicKey(); + if (!publicKey) + throw new Error('Could not get public key from wallet; maybe Freighter is not signed in?'); + if (needsNonInvokerSigningBy.indexOf(publicKey) === -1) + throw new Error(`No auth entries for public key "${publicKey}"`); + const wallet = await this.getWallet(); + const rawInvokeHostFunctionOp = this.raw + .operations[0]; + const authEntries = rawInvokeHostFunctionOp.auth ?? []; + for (const [i, entry] of authEntries.entries()) { + if (entry.credentials().switch() !== + stellar_sdk_1.xdr.SorobanCredentialsType.sorobanCredentialsAddress()) { + // if the invoker/source account, then the entry doesn't need explicit + // signature, since the tx envelope is already signed by the source + // account, so only check for sorobanCredentialsAddress + continue; + } + const pk = stellar_sdk_1.StrKey.encodeEd25519PublicKey(entry.credentials().address().address().accountId().ed25519()); + // this auth entry needs to be signed by a different account + // (or maybe already was!) 
+ if (pk !== publicKey) + continue; + authEntries[i] = await (0, stellar_sdk_1.authorizeEntry)(entry, async (preimage) => buffer_1.Buffer.from(await wallet.signAuthEntry(preimage.toXDR('base64')), 'base64'), await expiration, this.options.networkPassphrase); + } + }; + get isReadCall() { + const authsCount = this.simulationData.result.auth.length; + const writeLength = this.simulationData.transactionData.resources().footprint().readWrite().length; + return (authsCount === 0) && (writeLength === 0); + } + hasRealInvoker = async () => { + const account = await this.getAccount(); + return account.accountId() !== exports.NULL_ACCOUNT; + }; +} +exports.AssembledTransaction = AssembledTransaction; +/** + * A transaction that has been sent to the Soroban network. This happens in two steps: + * + * 1. `sendTransaction`: initial submission of the transaction to the network. + * This step can run into problems, and will be retried with exponential + * backoff if it does. See all attempts in `sendTransactionResponseAll` and the + * most recent attempt in `sendTransactionResponse`. + * 2. `getTransaction`: once the transaction has been submitted to the network + * successfully, you need to wait for it to finalize to get the results of the + * transaction. This step can also run into problems, and will be retried with + * exponential backoff if it does. See all attempts in + * `getTransactionResponseAll` and the most recent attempt in + * `getTransactionResponse`. + */ +class SentTransaction { + options; + assembled; + server; + signed; + sendTransactionResponse; + sendTransactionResponseAll; + getTransactionResponse; + getTransactionResponseAll; + constructor(options, assembled) { + this.options = options; + this.assembled = assembled; + this.server = new stellar_sdk_1.SorobanRpc.Server(this.options.rpcUrl, { + allowHttp: this.options.rpcUrl.startsWith("http://"), + }); + this.assembled = assembled; + } + static init = async (options, assembled, secondsToWait = 10) => { + const tx = new SentTransaction(options, assembled); + return await tx.send(secondsToWait); + }; + send = async (secondsToWait = 10) => { + const wallet = await this.assembled.getWallet(); + this.sendTransactionResponseAll = await withExponentialBackoff(async (previousFailure) => { + if (previousFailure) { + // Increment transaction sequence number and resimulate before trying again + // Soroban transaction can only have 1 operation + const op = this.assembled.raw.operations[0]; + this.assembled.raw = new stellar_sdk_1.TransactionBuilder(await this.assembled.getAccount(), { + fee: this.assembled.raw.fee, + networkPassphrase: this.options.networkPassphrase, + }) + .setTimeout(stellar_sdk_1.TimeoutInfinite) + .addOperation(stellar_sdk_1.Operation.invokeHostFunction({ ...op, auth: op.auth ?? [] })) + .build(); + await this.assembled.simulate(); + } + const signature = await wallet.signTransaction(this.assembled.raw.toXDR(), { + networkPassphrase: this.options.networkPassphrase, + }); + this.signed = stellar_sdk_1.TransactionBuilder.fromXDR(signature, this.options.networkPassphrase); + return this.server.sendTransaction(this.signed); + }, resp => resp.status !== "PENDING", secondsToWait); + this.sendTransactionResponse = this.sendTransactionResponseAll[this.sendTransactionResponseAll.length - 1]; + if (this.sendTransactionResponse.status !== "PENDING") { + throw new Error(`Tried to resubmit transaction for ${secondsToWait} seconds, but it's still failing. 
` + + `All attempts: ${JSON.stringify(this.sendTransactionResponseAll, null, 2)}`); + } + const { hash } = this.sendTransactionResponse; + this.getTransactionResponseAll = await withExponentialBackoff(() => this.server.getTransaction(hash), resp => resp.status === stellar_sdk_1.SorobanRpc.Api.GetTransactionStatus.NOT_FOUND, secondsToWait); + this.getTransactionResponse = this.getTransactionResponseAll[this.getTransactionResponseAll.length - 1]; + if (this.getTransactionResponse.status === stellar_sdk_1.SorobanRpc.Api.GetTransactionStatus.NOT_FOUND) { + console.error(`Waited ${secondsToWait} seconds for transaction to complete, but it did not. ` + + `Returning anyway. Check the transaction status manually. ` + + `Sent transaction: ${JSON.stringify(this.sendTransactionResponse, null, 2)}\n` + + `All attempts to get the result: ${JSON.stringify(this.getTransactionResponseAll, null, 2)}`); + } + return this; + }; + get result() { + // 1. check if transaction was submitted and awaited with `getTransaction` + if ("getTransactionResponse" in this && + this.getTransactionResponse) { + // getTransactionResponse has a `returnValue` field unless it failed + if ("returnValue" in this.getTransactionResponse) { + return this.options.parseResultXdr(this.getTransactionResponse.returnValue); + } + // if "returnValue" not present, the transaction failed; return without parsing the result + throw new Error("Transaction failed! Cannot parse result."); + } + // 2. otherwise, maybe it was merely sent with `sendTransaction` + if (this.sendTransactionResponse) { + const errorResult = this.sendTransactionResponse.errorResult?.result(); + if (errorResult) { + throw new SendFailedError(`Transaction simulation looked correct, but attempting to send the transaction failed. Check \`simulation\` and \`sendTransactionResponseAll\` to troubleshoot. Decoded \`sendTransactionResponse.errorResultXdr\`: ${errorResult}`); + } + throw new SendResultOnlyError(`Transaction was sent to the network, but not yet awaited. No result to show. Await transaction completion with \`getTransaction(sendTransactionResponse.hash)\``); + } + // 3. finally, if neither of those are present, throw an error + throw new Error(`Sending transaction failed: ${JSON.stringify(this.assembled)}`); + } +} +/** + * Keep calling a `fn` for `secondsToWait` seconds, if `keepWaitingIf` is true. + * Returns an array of all attempts to call the function. 
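For intuition about the retry cadence documented above: the delay starts at one second, grows by `exponentialFactor` (1.5 by default) after each attempt, and the last delay is clamped so the total stays within `secondsToWait`. A rough standalone sketch (not part of the library) that reproduces the schedule, assuming each attempt returns immediately:

```ts
// Approximates the delays chosen by withExponentialBackoff below.
function backoffSchedule(secondsToWait: number, factor = 1.5): number[] {
  const budget = secondsToWait * 1000;
  const waits: number[] = [];
  let elapsed = 0;
  let wait = 1000; // first retry waits 1 second
  while (elapsed < budget) {
    const clamped = Math.min(wait, budget - elapsed); // never overshoot the budget
    waits.push(clamped);
    elapsed += clamped;
    wait *= factor;
  }
  return waits;
}

console.log(backoffSchedule(10)); // ≈ [1000, 1500, 2250, 3375, 1875]
```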
+ */ +async function withExponentialBackoff(fn, keepWaitingIf, secondsToWait, exponentialFactor = 1.5, verbose = false) { + const attempts = []; + let count = 0; + attempts.push(await fn()); + if (!keepWaitingIf(attempts[attempts.length - 1])) + return attempts; + const waitUntil = new Date(Date.now() + secondsToWait * 1000).valueOf(); + let waitTime = 1000; + let totalWaitTime = waitTime; + while (Date.now() < waitUntil && keepWaitingIf(attempts[attempts.length - 1])) { + count++; + // Wait a beat + if (verbose) { + console.info(`Waiting ${waitTime}ms before trying again (bringing the total wait time to ${totalWaitTime}ms so far, of total ${secondsToWait * 1000}ms)`); + } + await new Promise(res => setTimeout(res, waitTime)); + // Exponential backoff + waitTime = waitTime * exponentialFactor; + if (new Date(Date.now() + waitTime).valueOf() > waitUntil) { + waitTime = waitUntil - Date.now(); + if (verbose) { + console.info(`was gonna wait too long; new waitTime: ${waitTime}ms`); + } + } + totalWaitTime = waitTime + totalWaitTime; + // Try again + attempts.push(await fn(attempts[attempts.length - 1])); + if (verbose && keepWaitingIf(attempts[attempts.length - 1])) { + console.info(`${count}. Called ${fn}; ${attempts.length} prev attempts. Most recent: ${JSON.stringify(attempts[attempts.length - 1], null, 2)}`); + } + } + return attempts; +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/index.d.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/index.d.ts new file mode 100644 index 00000000..66d3d595 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/index.d.ts @@ -0,0 +1,424 @@ +import { ContractSpec } from '@stellar/stellar-sdk'; +import { Buffer } from "buffer"; +import { AssembledTransaction, Ok, Err } from './assembled-tx.js'; +import type { u32, i32, i64, i128, Option, Error_ } from './assembled-tx.js'; +import type { ClassOptions } from './method-options.js'; +export * from './assembled-tx.js'; +export * from './method-options.js'; +export declare const networks: { + readonly futurenet: { + readonly networkPassphrase: "Test SDF Future Network ; October 2022"; + readonly contractId: "CBYMYMSDF6FBDNCFJCRC7KMO4REYFPOH2U4N7FXI3GJO6YXNCQ43CDSK"; + }; +}; +/** + This is from the rust doc above the struct Test + */ +export interface Test { + /** + + */ + a: u32; + /** + + */ + b: boolean; + /** + + */ + c: string; +} +/** + + */ +export type SimpleEnum = { + tag: "First"; + values: void; +} | { + tag: "Second"; + values: void; +} | { + tag: "Third"; + values: void; +}; +/** + + */ +export declare enum RoyalCard { + Jack = 11, + Queen = 12, + King = 13 +} +/** + + */ +export type TupleStruct = readonly [Test, SimpleEnum]; +/** + + */ +export type ComplexEnum = { + tag: "Struct"; + values: readonly [Test]; +} | { + tag: "Tuple"; + values: readonly [TupleStruct]; +} | { + tag: "Enum"; + values: readonly [SimpleEnum]; +} | { + tag: "Asset"; + values: readonly [string, i128]; +} | { + tag: "Void"; + values: void; +}; +/** + + */ +export declare const Errors: { + 1: { + message: string; + }; +}; +export declare class Contract { + readonly options: ClassOptions; + spec: ContractSpec; + constructor(options: ClassOptions); + private readonly parsers; + private txFromJSON; + readonly fromJSON: { + hello: (json: string) => AssembledTransaction; + woid: (json: string) => AssembledTransaction; + val: (json: string) => AssembledTransaction; + u32FailOnEven: (json: string) => AssembledTransaction | Ok>; + 
u32: (json: string) => AssembledTransaction; + i32: (json: string) => AssembledTransaction; + i64: (json: string) => AssembledTransaction; + struktHel: (json: string) => AssembledTransaction; + strukt: (json: string) => AssembledTransaction; + simple: (json: string) => AssembledTransaction; + complex: (json: string) => AssembledTransaction; + addresse: (json: string) => AssembledTransaction; + bytes: (json: string) => AssembledTransaction; + bytesN: (json: string) => AssembledTransaction; + card: (json: string) => AssembledTransaction; + boolean: (json: string) => AssembledTransaction; + not: (json: string) => AssembledTransaction; + i128: (json: string) => AssembledTransaction; + u128: (json: string) => AssembledTransaction; + multiArgs: (json: string) => AssembledTransaction; + map: (json: string) => AssembledTransaction>; + vec: (json: string) => AssembledTransaction; + tuple: (json: string) => AssembledTransaction; + option: (json: string) => AssembledTransaction>; + u256: (json: string) => AssembledTransaction; + i256: (json: string) => AssembledTransaction; + string: (json: string) => AssembledTransaction; + tupleStrukt: (json: string) => AssembledTransaction; + }; + /** +* Construct and simulate a hello transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + hello: ({ hello }: { + hello: string; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a woid transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + woid: (options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a val transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + val: (options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a u32_fail_on_even transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32FailOnEven: ({ u32_ }: { + u32_: u32; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise | Ok>>; + /** +* Construct and simulate a u32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32: ({ u32_ }: { + u32_: u32; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. 
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i32: ({ i32_ }: { + i32_: i32; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i64_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i64: ({ i64_ }: { + i64_: i64; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a strukt_hel transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example contract method which takes a struct +*/ + struktHel: ({ strukt }: { + strukt: Test; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + strukt: ({ strukt }: { + strukt: Test; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a simple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + simple: ({ simple }: { + simple: SimpleEnum; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a complex transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + complex: ({ complex }: { + complex: ComplexEnum; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a addresse transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + addresse: ({ addresse }: { + addresse: string; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a bytes transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytes: ({ bytes }: { + bytes: Buffer; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a bytes_n transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. 
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytesN: ({ bytes_n }: { + bytes_n: Buffer; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a card transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + card: ({ card }: { + card: RoyalCard; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a boolean transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + boolean: ({ boolean }: { + boolean: boolean; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a not transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Negates a boolean value +*/ + not: ({ boolean }: { + boolean: boolean; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i128: ({ i128 }: { + i128: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a u128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u128: ({ u128 }: { + u128: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a multi_args transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + multiArgs: ({ a, b }: { + a: u32; + b: boolean; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a map transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + map: ({ map }: { + map: Map; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>>; + /** +* Construct and simulate a vec transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. 
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + vec: ({ vec }: { + vec: Array; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a tuple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + tuple: ({ tuple }: { + tuple: readonly [string, u32]; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a option transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example of an optional argument +*/ + option: ({ option }: { + option: Option; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>>; + /** +* Construct and simulate a u256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u256: ({ u256 }: { + u256: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i256: ({ i256 }: { + i256: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a string transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + string: ({ string }: { + string: string; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a tuple_strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + tupleStrukt: ({ tuple_strukt }: { + tuple_strukt: TupleStruct; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/index.js b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/index.js new file mode 100644 index 00000000..d6e27aa8 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/index.js @@ -0,0 +1,527 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Contract = exports.Errors = exports.RoyalCard = exports.networks = void 0; +const stellar_sdk_1 = require("@stellar/stellar-sdk"); +const buffer_1 = require("buffer"); +const assembled_tx_js_1 = require("./assembled-tx.js"); +__exportStar(require("./assembled-tx.js"), exports); +__exportStar(require("./method-options.js"), exports); +if (typeof window !== 'undefined') { + //@ts-ignore Buffer exists + window.Buffer = window.Buffer || buffer_1.Buffer; +} +exports.networks = { + futurenet: { + networkPassphrase: "Test SDF Future Network ; October 2022", + contractId: "CBYMYMSDF6FBDNCFJCRC7KMO4REYFPOH2U4N7FXI3GJO6YXNCQ43CDSK", + } +}; +/** + + */ +var RoyalCard; +(function (RoyalCard) { + RoyalCard[RoyalCard["Jack"] = 11] = "Jack"; + RoyalCard[RoyalCard["Queen"] = 12] = "Queen"; + RoyalCard[RoyalCard["King"] = 13] = "King"; +})(RoyalCard || (exports.RoyalCard = RoyalCard = {})); +/** + + */ +exports.Errors = { + 1: { message: "Please provide an odd number" } +}; +class Contract { + options; + spec; + constructor(options) { + this.options = options; + this.spec = new stellar_sdk_1.ContractSpec([ + "AAAAAQAAAC9UaGlzIGlzIGZyb20gdGhlIHJ1c3QgZG9jIGFib3ZlIHRoZSBzdHJ1Y3QgVGVzdAAAAAAAAAAABFRlc3QAAAADAAAAAAAAAAFhAAAAAAAABAAAAAAAAAABYgAAAAAAAAEAAAAAAAAAAWMAAAAAAAAR", + "AAAAAgAAAAAAAAAAAAAAClNpbXBsZUVudW0AAAAAAAMAAAAAAAAAAAAAAAVGaXJzdAAAAAAAAAAAAAAAAAAABlNlY29uZAAAAAAAAAAAAAAAAAAFVGhpcmQAAAA=", + "AAAAAwAAAAAAAAAAAAAACVJveWFsQ2FyZAAAAAAAAAMAAAAAAAAABEphY2sAAAALAAAAAAAAAAVRdWVlbgAAAAAAAAwAAAAAAAAABEtpbmcAAAAN", + "AAAAAQAAAAAAAAAAAAAAC1R1cGxlU3RydWN0AAAAAAIAAAAAAAAAATAAAAAAAAfQAAAABFRlc3QAAAAAAAAAATEAAAAAAAfQAAAAClNpbXBsZUVudW0AAA==", + "AAAAAgAAAAAAAAAAAAAAC0NvbXBsZXhFbnVtAAAAAAUAAAABAAAAAAAAAAZTdHJ1Y3QAAAAAAAEAAAfQAAAABFRlc3QAAAABAAAAAAAAAAVUdXBsZQAAAAAAAAEAAAfQAAAAC1R1cGxlU3RydWN0AAAAAAEAAAAAAAAABEVudW0AAAABAAAH0AAAAApTaW1wbGVFbnVtAAAAAAABAAAAAAAAAAVBc3NldAAAAAAAAAIAAAATAAAACwAAAAAAAAAAAAAABFZvaWQ=", + "AAAABAAAAAAAAAAAAAAABUVycm9yAAAAAAAAAQAAABxQbGVhc2UgcHJvdmlkZSBhbiBvZGQgbnVtYmVyAAAAD051bWJlck11c3RCZU9kZAAAAAAB", + "AAAAAAAAAAAAAAAFaGVsbG8AAAAAAAABAAAAAAAAAAVoZWxsbwAAAAAAABEAAAABAAAAEQ==", + "AAAAAAAAAAAAAAAEd29pZAAAAAAAAAAA", + "AAAAAAAAAAAAAAADdmFsAAAAAAAAAAABAAAAAA==", + "AAAAAAAAAAAAAAAQdTMyX2ZhaWxfb25fZXZlbgAAAAEAAAAAAAAABHUzMl8AAAAEAAAAAQAAA+kAAAAEAAAAAw==", + "AAAAAAAAAAAAAAAEdTMyXwAAAAEAAAAAAAAABHUzMl8AAAAEAAAAAQAAAAQ=", + "AAAAAAAAAAAAAAAEaTMyXwAAAAEAAAAAAAAABGkzMl8AAAAFAAAAAQAAAAU=", + "AAAAAAAAAAAAAAAEaTY0XwAAAAEAAAAAAAAABGk2NF8AAAAHAAAAAQAAAAc=", + "AAAAAAAAACxFeGFtcGxlIGNvbnRyYWN0IG1ldGhvZCB3aGljaCB0YWtlcyBhIHN0cnVjdAAAAApzdHJ1a3RfaGVsAAAAAAABAAAAAAAAAAZzdHJ1a3QAAAAAB9AAAAAEVGVzdAAAAAEAAAPqAAAAEQ==", + "AAAAAAAAAAAAAAAGc3RydWt0AAAAAAABAAAAAAAAAAZzdHJ1a3QAAAAAB9AAAAAEVGVzdAAAAAEAAAfQAAAABFRlc3Q=", + "AAAAAAAAAAAAAAAGc2ltcGxlAAAAAAABAAAAAAAAAAZzaW1wbGUAAAAAB9AAAAAKU2ltcGxlRW51bQAAAAAAAQAAB9AAAAAKU2ltcGxlRW51bQAA", + "AAAAAAAAAAAAAAAHY29tcGxleAAAAAABAAAAAAAAAAdjb21wbGV4AAAAB9AAAAALQ29tcGxleEVudW0AAAAAAQAAB9AAAAALQ29tcGxleEVudW0A", + 
"AAAAAAAAAAAAAAAIYWRkcmVzc2UAAAABAAAAAAAAAAhhZGRyZXNzZQAAABMAAAABAAAAEw==", + "AAAAAAAAAAAAAAAFYnl0ZXMAAAAAAAABAAAAAAAAAAVieXRlcwAAAAAAAA4AAAABAAAADg==", + "AAAAAAAAAAAAAAAHYnl0ZXNfbgAAAAABAAAAAAAAAAdieXRlc19uAAAAA+4AAAAJAAAAAQAAA+4AAAAJ", + "AAAAAAAAAAAAAAAEY2FyZAAAAAEAAAAAAAAABGNhcmQAAAfQAAAACVJveWFsQ2FyZAAAAAAAAAEAAAfQAAAACVJveWFsQ2FyZAAAAA==", + "AAAAAAAAAAAAAAAHYm9vbGVhbgAAAAABAAAAAAAAAAdib29sZWFuAAAAAAEAAAABAAAAAQ==", + "AAAAAAAAABdOZWdhdGVzIGEgYm9vbGVhbiB2YWx1ZQAAAAADbm90AAAAAAEAAAAAAAAAB2Jvb2xlYW4AAAAAAQAAAAEAAAAB", + "AAAAAAAAAAAAAAAEaTEyOAAAAAEAAAAAAAAABGkxMjgAAAALAAAAAQAAAAs=", + "AAAAAAAAAAAAAAAEdTEyOAAAAAEAAAAAAAAABHUxMjgAAAAKAAAAAQAAAAo=", + "AAAAAAAAAAAAAAAKbXVsdGlfYXJncwAAAAAAAgAAAAAAAAABYQAAAAAAAAQAAAAAAAAAAWIAAAAAAAABAAAAAQAAAAQ=", + "AAAAAAAAAAAAAAADbWFwAAAAAAEAAAAAAAAAA21hcAAAAAPsAAAABAAAAAEAAAABAAAD7AAAAAQAAAAB", + "AAAAAAAAAAAAAAADdmVjAAAAAAEAAAAAAAAAA3ZlYwAAAAPqAAAABAAAAAEAAAPqAAAABA==", + "AAAAAAAAAAAAAAAFdHVwbGUAAAAAAAABAAAAAAAAAAV0dXBsZQAAAAAAA+0AAAACAAAAEQAAAAQAAAABAAAD7QAAAAIAAAARAAAABA==", + "AAAAAAAAAB9FeGFtcGxlIG9mIGFuIG9wdGlvbmFsIGFyZ3VtZW50AAAAAAZvcHRpb24AAAAAAAEAAAAAAAAABm9wdGlvbgAAAAAD6AAAAAQAAAABAAAD6AAAAAQ=", + "AAAAAAAAAAAAAAAEdTI1NgAAAAEAAAAAAAAABHUyNTYAAAAMAAAAAQAAAAw=", + "AAAAAAAAAAAAAAAEaTI1NgAAAAEAAAAAAAAABGkyNTYAAAANAAAAAQAAAA0=", + "AAAAAAAAAAAAAAAGc3RyaW5nAAAAAAABAAAAAAAAAAZzdHJpbmcAAAAAABAAAAABAAAAEA==", + "AAAAAAAAAAAAAAAMdHVwbGVfc3RydWt0AAAAAQAAAAAAAAAMdHVwbGVfc3RydWt0AAAH0AAAAAtUdXBsZVN0cnVjdAAAAAABAAAH0AAAAAtUdXBsZVN0cnVjdAA=" + ]); + } + parsers = { + hello: (result) => this.spec.funcResToNative("hello", result), + woid: () => { }, + val: (result) => this.spec.funcResToNative("val", result), + u32FailOnEven: (result) => { + if (result instanceof assembled_tx_js_1.Err) + return result; + return new assembled_tx_js_1.Ok(this.spec.funcResToNative("u32_fail_on_even", result)); + }, + u32: (result) => this.spec.funcResToNative("u32_", result), + i32: (result) => this.spec.funcResToNative("i32_", result), + i64: (result) => this.spec.funcResToNative("i64_", result), + struktHel: (result) => this.spec.funcResToNative("strukt_hel", result), + strukt: (result) => this.spec.funcResToNative("strukt", result), + simple: (result) => this.spec.funcResToNative("simple", result), + complex: (result) => this.spec.funcResToNative("complex", result), + addresse: (result) => this.spec.funcResToNative("addresse", result), + bytes: (result) => this.spec.funcResToNative("bytes", result), + bytesN: (result) => this.spec.funcResToNative("bytes_n", result), + card: (result) => this.spec.funcResToNative("card", result), + boolean: (result) => this.spec.funcResToNative("boolean", result), + not: (result) => this.spec.funcResToNative("not", result), + i128: (result) => this.spec.funcResToNative("i128", result), + u128: (result) => this.spec.funcResToNative("u128", result), + multiArgs: (result) => this.spec.funcResToNative("multi_args", result), + map: (result) => this.spec.funcResToNative("map", result), + vec: (result) => this.spec.funcResToNative("vec", result), + tuple: (result) => this.spec.funcResToNative("tuple", result), + option: (result) => this.spec.funcResToNative("option", result), + u256: (result) => this.spec.funcResToNative("u256", result), + i256: (result) => this.spec.funcResToNative("i256", result), + string: (result) => this.spec.funcResToNative("string", result), + tupleStrukt: (result) => this.spec.funcResToNative("tuple_strukt", result) + }; + txFromJSON = (json) => { + const { method, ...tx } = JSON.parse(json); + return 
assembled_tx_js_1.AssembledTransaction.fromJSON({ + ...this.options, + method, + parseResultXdr: this.parsers[method], + }, tx); + }; + fromJSON = { + hello: (this.txFromJSON), + woid: (this.txFromJSON), + val: (this.txFromJSON), + u32FailOnEven: (this.txFromJSON), + u32: (this.txFromJSON), + i32: (this.txFromJSON), + i64: (this.txFromJSON), + struktHel: (this.txFromJSON), + strukt: (this.txFromJSON), + simple: (this.txFromJSON), + complex: (this.txFromJSON), + addresse: (this.txFromJSON), + bytes: (this.txFromJSON), + bytesN: (this.txFromJSON), + card: (this.txFromJSON), + boolean: (this.txFromJSON), + not: (this.txFromJSON), + i128: (this.txFromJSON), + u128: (this.txFromJSON), + multiArgs: (this.txFromJSON), + map: (this.txFromJSON), + vec: (this.txFromJSON), + tuple: (this.txFromJSON), + option: (this.txFromJSON), + u256: (this.txFromJSON), + i256: (this.txFromJSON), + string: (this.txFromJSON), + tupleStrukt: (this.txFromJSON) + }; + /** +* Construct and simulate a hello transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + hello = async ({ hello }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'hello', + args: this.spec.funcArgsToScVals("hello", { hello }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['hello'], + }); + }; + /** +* Construct and simulate a woid transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + woid = async (options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'woid', + args: this.spec.funcArgsToScVals("woid", {}), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['woid'], + }); + }; + /** +* Construct and simulate a val transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + val = async (options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'val', + args: this.spec.funcArgsToScVals("val", {}), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['val'], + }); + }; + /** +* Construct and simulate a u32_fail_on_even transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32FailOnEven = async ({ u32_ }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'u32_fail_on_even', + args: this.spec.funcArgsToScVals("u32_fail_on_even", { u32_ }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['u32FailOnEven'], + }); + }; + /** +* Construct and simulate a u32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. 
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32 = async ({ u32_ }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'u32_', + args: this.spec.funcArgsToScVals("u32_", { u32_ }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['u32'], + }); + }; + /** +* Construct and simulate a i32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i32 = async ({ i32_ }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'i32_', + args: this.spec.funcArgsToScVals("i32_", { i32_ }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['i32'], + }); + }; + /** +* Construct and simulate a i64_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i64 = async ({ i64_ }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'i64_', + args: this.spec.funcArgsToScVals("i64_", { i64_ }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['i64'], + }); + }; + /** +* Construct and simulate a strukt_hel transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example contract method which takes a struct +*/ + struktHel = async ({ strukt }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'strukt_hel', + args: this.spec.funcArgsToScVals("strukt_hel", { strukt }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['struktHel'], + }); + }; + /** +* Construct and simulate a strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + strukt = async ({ strukt }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'strukt', + args: this.spec.funcArgsToScVals("strukt", { strukt }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['strukt'], + }); + }; + /** +* Construct and simulate a simple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + simple = async ({ simple }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'simple', + args: this.spec.funcArgsToScVals("simple", { simple }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['simple'], + }); + }; + /** +* Construct and simulate a complex transaction. 
Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + complex = async ({ complex }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'complex', + args: this.spec.funcArgsToScVals("complex", { complex }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['complex'], + }); + }; + /** +* Construct and simulate a addresse transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + addresse = async ({ addresse }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'addresse', + args: this.spec.funcArgsToScVals("addresse", { addresse: new stellar_sdk_1.Address(addresse) }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['addresse'], + }); + }; + /** +* Construct and simulate a bytes transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytes = async ({ bytes }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'bytes', + args: this.spec.funcArgsToScVals("bytes", { bytes }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['bytes'], + }); + }; + /** +* Construct and simulate a bytes_n transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytesN = async ({ bytes_n }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'bytes_n', + args: this.spec.funcArgsToScVals("bytes_n", { bytes_n }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['bytesN'], + }); + }; + /** +* Construct and simulate a card transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + card = async ({ card }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'card', + args: this.spec.funcArgsToScVals("card", { card }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['card'], + }); + }; + /** +* Construct and simulate a boolean transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. 
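+*
+* Hypothetical usage sketch (not generated output); the rpcUrl below is a
+* placeholder value:
+*
+*     import { Contract, networks } from "test_custom_types";
+*     const contract = new Contract({
+*         ...networks.futurenet,
+*         rpcUrl: "https://rpc-futurenet.stellar.org", // assumed RPC endpoint
+*     });
+*     const tx = await contract.boolean({ boolean: true });
+*     console.log(tx.result); // parsed simulation result
+*     // state-changing methods would additionally need: await tx.signAndSend()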
+*/ + boolean = async ({ boolean }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'boolean', + args: this.spec.funcArgsToScVals("boolean", { boolean }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['boolean'], + }); + }; + /** +* Construct and simulate a not transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Negates a boolean value +*/ + not = async ({ boolean }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'not', + args: this.spec.funcArgsToScVals("not", { boolean }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['not'], + }); + }; + /** +* Construct and simulate a i128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i128 = async ({ i128 }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'i128', + args: this.spec.funcArgsToScVals("i128", { i128 }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['i128'], + }); + }; + /** +* Construct and simulate a u128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u128 = async ({ u128 }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'u128', + args: this.spec.funcArgsToScVals("u128", { u128 }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['u128'], + }); + }; + /** +* Construct and simulate a multi_args transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + multiArgs = async ({ a, b }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'multi_args', + args: this.spec.funcArgsToScVals("multi_args", { a, b }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['multiArgs'], + }); + }; + /** +* Construct and simulate a map transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + map = async ({ map }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'map', + args: this.spec.funcArgsToScVals("map", { map }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['map'], + }); + }; + /** +* Construct and simulate a vec transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. 
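+*
+* Illustrative call (not generated output), assuming a `contract` instance as in
+* the `boolean` example above; the second argument is the optional MethodOptions:
+*
+*     const tx = await contract.vec({ vec: [1, 2, 3] }, { fee: 100 });
+*     console.log(tx.result);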
+*/ + vec = async ({ vec }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'vec', + args: this.spec.funcArgsToScVals("vec", { vec }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['vec'], + }); + }; + /** +* Construct and simulate a tuple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + tuple = async ({ tuple }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'tuple', + args: this.spec.funcArgsToScVals("tuple", { tuple }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['tuple'], + }); + }; + /** +* Construct and simulate a option transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example of an optional argument +*/ + option = async ({ option }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'option', + args: this.spec.funcArgsToScVals("option", { option }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['option'], + }); + }; + /** +* Construct and simulate a u256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u256 = async ({ u256 }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'u256', + args: this.spec.funcArgsToScVals("u256", { u256 }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['u256'], + }); + }; + /** +* Construct and simulate a i256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i256 = async ({ i256 }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'i256', + args: this.spec.funcArgsToScVals("i256", { i256 }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['i256'], + }); + }; + /** +* Construct and simulate a string transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + string = async ({ string }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'string', + args: this.spec.funcArgsToScVals("string", { string }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['string'], + }); + }; + /** +* Construct and simulate a tuple_strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. 
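+*
+* Illustrative call (not generated output); the JS shape of the unit enum
+* variant is assumed from the `SimpleEnum` type in this package:
+*
+*     const tx = await contract.tupleStrukt({
+*         tuple_strukt: [
+*             { a: 1, b: true, c: "hello" },       // Test struct
+*             { tag: "First", values: undefined }, // SimpleEnum variant
+*         ],
+*     });
+*     console.log(tx.result);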
+*/ + tupleStrukt = async ({ tuple_strukt }, options = {}) => { + return await assembled_tx_js_1.AssembledTransaction.fromSimulation({ + method: 'tuple_strukt', + args: this.spec.funcArgsToScVals("tuple_strukt", { tuple_strukt }), + ...options, + ...this.options, + errorTypes: exports.Errors, + parseResultXdr: this.parsers['tupleStrukt'], + }); + }; +} +exports.Contract = Contract; diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/method-options.d.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/method-options.d.ts new file mode 100644 index 00000000..fc6b21d5 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/method-options.d.ts @@ -0,0 +1,47 @@ +declare let responseTypes: 'simulated' | 'full' | undefined; +export type ResponseTypes = typeof responseTypes; +export type XDR_BASE64 = string; +export interface Wallet { + isConnected: () => Promise; + isAllowed: () => Promise; + getUserInfo: () => Promise<{ + publicKey?: string; + }>; + signTransaction: (tx: XDR_BASE64, opts?: { + network?: string; + networkPassphrase?: string; + accountToSign?: string; + }) => Promise; + signAuthEntry: (entryXdr: XDR_BASE64, opts?: { + accountToSign?: string; + }) => Promise; +} +export type ClassOptions = { + contractId: string; + networkPassphrase: string; + rpcUrl: string; + errorTypes?: Record; + /** + * A Wallet interface, such as Freighter, that has the methods `isConnected`, `isAllowed`, `getUserInfo`, and `signTransaction`. If not provided, will attempt to import and use Freighter. Example: + * + * @example + * ```ts + * import freighter from "@stellar/freighter-api"; + * import { Contract } from "test_custom_types"; + * const contract = new Contract({ + * …, + * wallet: freighter, + * }) + * ``` + */ + wallet?: Wallet; +}; +export type MethodOptions = { + /** + * The fee to pay for the transaction. 
Default: soroban-sdk's BASE_FEE ('100') + */ + fee?: number; +}; +export {}; diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/method-options.js b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/method-options.js new file mode 100644 index 00000000..6d483ac3 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/cjs/method-options.js @@ -0,0 +1,4 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +// defined this way so typeahead shows full union, not named alias +let responseTypes; diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/assembled-tx.d.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/assembled-tx.d.ts new file mode 100644 index 00000000..1d5e6f5e --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/assembled-tx.d.ts @@ -0,0 +1,184 @@ +import { Account, Address, Operation, SorobanRpc, xdr } from "@stellar/stellar-sdk"; +import type { Memo, MemoType, Transaction } from "@stellar/stellar-sdk"; +import type { ClassOptions, MethodOptions, Wallet, XDR_BASE64 } from "./method-options.js"; +export type Tx = Transaction, Operation[]>; +export declare class ExpiredStateError extends Error { +} +export declare class NeedsMoreSignaturesError extends Error { +} +export declare class WalletDisconnectedError extends Error { +} +export declare class SendResultOnlyError extends Error { +} +export declare class SendFailedError extends Error { +} +export declare class NoUnsignedNonInvokerAuthEntriesError extends Error { +} +type SendTx = SorobanRpc.Api.SendTransactionResponse; +type GetTx = SorobanRpc.Api.GetTransactionResponse; +export type u32 = number; +export type i32 = number; +export type u64 = bigint; +export type i64 = bigint; +export type u128 = bigint; +export type i128 = bigint; +export type u256 = bigint; +export type i256 = bigint; +export type Option = T | undefined; +export type Typepoint = bigint; +export type Duration = bigint; +export { Address }; +export interface Error_ { + message: string; +} +export interface Result { + unwrap(): T; + unwrapErr(): E; + isOk(): boolean; + isErr(): boolean; +} +export declare class Ok implements Result { + readonly value: T; + constructor(value: T); + unwrapErr(): E; + unwrap(): T; + isOk(): boolean; + isErr(): boolean; +} +export declare class Err implements Result { + readonly error: E; + constructor(error: E); + unwrapErr(): E; + unwrap(): never; + isOk(): boolean; + isErr(): boolean; +} +export declare const contractErrorPattern: RegExp; +type AssembledTransactionOptions = MethodOptions & ClassOptions & { + method: string; + args?: any[]; + parseResultXdr: (xdr: string | xdr.ScVal | Err) => T; +}; +export declare const NULL_ACCOUNT = "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF"; +export declare class AssembledTransaction { + options: AssembledTransactionOptions; + raw: Tx; + private simulation?; + private simulationResult?; + private simulationTransactionData?; + private server; + toJSON(): string; + static fromJSON(options: Omit, 'args'>, { tx, simulationResult, simulationTransactionData }: { + tx: XDR_BASE64; + simulationResult: { + auth: XDR_BASE64[]; + retval: XDR_BASE64; + }; + simulationTransactionData: XDR_BASE64; + }): AssembledTransaction; + private constructor(); + static fromSimulation(options: AssembledTransactionOptions): Promise>; + simulate: () => Promise; + get simulationData(): { + result: 
SorobanRpc.Api.SimulateHostFunctionResult; + transactionData: xdr.SorobanTransactionData; + }; + get result(): T; + parseError(errorMessage: string): Err | undefined; + getWallet: () => Promise; + getPublicKey: () => Promise; + /** + * Get account details from the Soroban network for the publicKey currently + * selected in user's wallet. If not connected to Freighter, use placeholder + * null account. + */ + getAccount: () => Promise; + /** + * Sign the transaction with the `wallet` (default Freighter), then send to + * the network and return a `SentTransaction` that keeps track of all the + * attempts to send and fetch the transaction from the network. + */ + signAndSend: ({ secondsToWait, force }?: { + /** + * Wait `secondsToWait` seconds (default: 10) for both the transaction to SEND successfully (will keep trying if the server returns `TRY_AGAIN_LATER`), as well as for the transaction to COMPLETE (will keep checking if the server returns `PENDING`). + */ + secondsToWait?: number | undefined; + /** + * If `true`, sign and send the transaction even if it is a read call. + */ + force?: boolean | undefined; + }) => Promise>; + getStorageExpiration: () => Promise; + /** + * Get a list of accounts, other than the invoker of the simulation, that + * need to sign auth entries in this transaction. + * + * Soroban allows multiple people to sign a transaction. Someone needs to + * sign the final transaction envelope; this person/account is called the + * _invoker_, or _source_. Other accounts might need to sign individual auth + * entries in the transaction, if they're not also the invoker. + * + * This function returns a list of accounts that need to sign auth entries, + * assuming that the same invoker/source account will sign the final + * transaction envelope as signed the initial simulation. + * + * One at a time, for each public key in this array, you will need to + * serialize this transaction with `toJSON`, send to the owner of that key, + * deserialize the transaction with `txFromJson`, and call + * {@link signAuthEntries}. Then re-serialize and send to the next account + * in this list. + */ + needsNonInvokerSigningBy: ({ includeAlreadySigned, }?: { + /** + * Whether or not to include auth entries that have already been signed. Default: false + */ + includeAlreadySigned?: boolean | undefined; + }) => Promise; + preImageFor(entry: xdr.SorobanAuthorizationEntry, signatureExpirationLedger: number): xdr.HashIdPreimage; + /** + * If {@link needsNonInvokerSigningBy} returns a non-empty list, you can serialize + * the transaction with `toJSON`, send it to the owner of one of the public keys + * in the map, deserialize with `txFromJSON`, and call this method on their + * machine. Internally, this will use `signAuthEntry` function from connected + * `wallet` for each. + * + * Then, re-serialize the transaction and either send to the next + * `needsNonInvokerSigningBy` owner, or send it back to the original account + * who simulated the transaction so they can {@link sign} the transaction + * envelope and {@link send} it to the network. + * + * Sending to all `needsNonInvokerSigningBy` owners in parallel is not currently + * supported! + */ + signAuthEntries: (expiration?: number | Promise) => Promise; + get isReadCall(): boolean; + hasRealInvoker: () => Promise; +} +/** + * A transaction that has been sent to the Soroban network. This happens in two steps: + * + * 1. `sendTransaction`: initial submission of the transaction to the network. 
+ * This step can run into problems, and will be retried with exponential + * backoff if it does. See all attempts in `sendTransactionResponseAll` and the + * most recent attempt in `sendTransactionResponse`. + * 2. `getTransaction`: once the transaction has been submitted to the network + * successfully, you need to wait for it to finalize to get the results of the + * transaction. This step can also run into problems, and will be retried with + * exponential backoff if it does. See all attempts in + * `getTransactionResponseAll` and the most recent attempt in + * `getTransactionResponse`. + */ +declare class SentTransaction { + options: AssembledTransactionOptions; + assembled: AssembledTransaction; + server: SorobanRpc.Server; + signed: Tx; + sendTransactionResponse?: SendTx; + sendTransactionResponseAll?: SendTx[]; + getTransactionResponse?: GetTx; + getTransactionResponseAll?: GetTx[]; + constructor(options: AssembledTransactionOptions, assembled: AssembledTransaction); + static init: (options: AssembledTransactionOptions, assembled: AssembledTransaction, secondsToWait?: number) => Promise>; + private send; + get result(): T; +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/assembled-tx.js b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/assembled-tx.js new file mode 100644 index 00000000..79b7c01d --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/assembled-tx.js @@ -0,0 +1,450 @@ +import { Account, Address, Contract, Operation, SorobanRpc, StrKey, TimeoutInfinite, TransactionBuilder, authorizeEntry, hash, xdr, BASE_FEE, } from "@stellar/stellar-sdk"; +import { Buffer } from "buffer"; +export class ExpiredStateError extends Error { +} +export class NeedsMoreSignaturesError extends Error { +} +export class WalletDisconnectedError extends Error { +} +export class SendResultOnlyError extends Error { +} +export class SendFailedError extends Error { +} +export class NoUnsignedNonInvokerAuthEntriesError extends Error { +} +export { Address }; +; +; +export class Ok { + value; + constructor(value) { + this.value = value; + } + unwrapErr() { + throw new Error('No error'); + } + unwrap() { + return this.value; + } + isOk() { + return true; + } + isErr() { + return !this.isOk(); + } +} +export class Err { + error; + constructor(error) { + this.error = error; + } + unwrapErr() { + return this.error; + } + unwrap() { + throw new Error(this.error.message); + } + isOk() { + return false; + } + isErr() { + return !this.isOk(); + } +} +export const contractErrorPattern = /Error\(Contract, #(\d+)\)/; +export const NULL_ACCOUNT = "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF"; +export class AssembledTransaction { + options; + raw; + simulation; + simulationResult; + simulationTransactionData; + server; + toJSON() { + return JSON.stringify({ + method: this.options.method, + tx: this.raw?.toXDR(), + simulationResult: { + auth: this.simulationData.result.auth.map(a => a.toXDR('base64')), + retval: this.simulationData.result.retval.toXDR('base64'), + }, + simulationTransactionData: this.simulationData.transactionData.toXDR('base64'), + }); + } + static fromJSON(options, { tx, simulationResult, simulationTransactionData }) { + const txn = new AssembledTransaction(options); + txn.raw = TransactionBuilder.fromXDR(tx, options.networkPassphrase); + txn.simulationResult = { + auth: simulationResult.auth.map(a => xdr.SorobanAuthorizationEntry.fromXDR(a, 'base64')), + retval: 
xdr.ScVal.fromXDR(simulationResult.retval, 'base64'), + }; + txn.simulationTransactionData = xdr.SorobanTransactionData.fromXDR(simulationTransactionData, 'base64'); + return txn; + } + constructor(options) { + this.options = options; + this.server = new SorobanRpc.Server(this.options.rpcUrl, { + allowHttp: this.options.rpcUrl.startsWith("http://"), + }); + } + static async fromSimulation(options) { + const tx = new AssembledTransaction(options); + const contract = new Contract(options.contractId); + tx.raw = new TransactionBuilder(await tx.getAccount(), { + fee: options.fee?.toString(10) ?? BASE_FEE, + networkPassphrase: options.networkPassphrase, + }) + .addOperation(contract.call(options.method, ...(options.args ?? []))) + .setTimeout(TimeoutInfinite) + .build(); + return await tx.simulate(); + } + simulate = async () => { + if (!this.raw) + throw new Error('Transaction has not yet been assembled'); + this.simulation = await this.server.simulateTransaction(this.raw); + if (SorobanRpc.Api.isSimulationSuccess(this.simulation)) { + this.raw = SorobanRpc.assembleTransaction(this.raw, this.simulation).build(); + } + return this; + }; + get simulationData() { + if (this.simulationResult && this.simulationTransactionData) { + return { + result: this.simulationResult, + transactionData: this.simulationTransactionData, + }; + } + // else, we know we just did the simulation on this machine + const simulation = this.simulation; + if (SorobanRpc.Api.isSimulationError(simulation)) { + throw new Error(`Transaction simulation failed: "${simulation.error}"`); + } + if (SorobanRpc.Api.isSimulationRestore(simulation)) { + throw new ExpiredStateError(`You need to restore some contract state before you can invoke this method. ${JSON.stringify(simulation, null, 2)}`); + } + if (!simulation.result) { + throw new Error(`Expected an invocation simulation, but got no 'result' field. Simulation: ${JSON.stringify(simulation, null, 2)}`); + } + // add to object for serialization & deserialization + this.simulationResult = simulation.result; + this.simulationTransactionData = simulation.transactionData.build(); + return { + result: this.simulationResult, + transactionData: this.simulationTransactionData, + }; + } + get result() { + try { + return this.options.parseResultXdr(this.simulationData.result.retval); + } + catch (e) { + let err = this.parseError(e.toString()); + if (err) + return err; + throw e; + } + } + parseError(errorMessage) { + if (!this.options.errorTypes) + return; + const match = errorMessage.match(contractErrorPattern); + if (!match) + return; + let i = parseInt(match[1], 10); + let err = this.options.errorTypes[i]; + if (err) + return new Err(err); + } + getWallet = async () => { + return this.options.wallet ?? (await import("@stellar/freighter-api")).default; + }; + getPublicKey = async () => { + const wallet = await this.getWallet(); + if (await wallet.isConnected() && await wallet.isAllowed()) { + return (await wallet.getUserInfo()).publicKey; + } + }; + /** + * Get account details from the Soroban network for the publicKey currently + * selected in user's wallet. If not connected to Freighter, use placeholder + * null account. + */ + getAccount = async () => { + const publicKey = await this.getPublicKey(); + return publicKey + ? 
await this.server.getAccount(publicKey) + : new Account(NULL_ACCOUNT, "0"); + }; + /** + * Sign the transaction with the `wallet` (default Freighter), then send to + * the network and return a `SentTransaction` that keeps track of all the + * attempts to send and fetch the transaction from the network. + */ + signAndSend = async ({ secondsToWait = 10, force = false } = {}) => { + if (!this.raw) { + throw new Error('Transaction has not yet been simulated'); + } + if (!force && this.isReadCall) { + throw new Error('This is a read call. It requires no signature or sending. Use `force: true` to sign and send anyway.'); + } + if (!await this.hasRealInvoker()) { + throw new WalletDisconnectedError('Wallet is not connected'); + } + if (this.raw.source !== (await this.getAccount()).accountId()) { + throw new Error(`You must submit the transaction with the account that originally created it. Please switch to the wallet with "${this.raw.source}" as its public key.`); + } + if ((await this.needsNonInvokerSigningBy()).length) { + throw new NeedsMoreSignaturesError('Transaction requires more signatures. See `needsNonInvokerSigningBy` for details.'); + } + return await SentTransaction.init(this.options, this, secondsToWait); + }; + getStorageExpiration = async () => { + const entryRes = await this.server.getLedgerEntries(new Contract(this.options.contractId).getFootprint()); + if (!entryRes.entries || + !entryRes.entries.length || + !entryRes.entries[0].liveUntilLedgerSeq) + throw new Error('failed to get ledger entry'); + return entryRes.entries[0].liveUntilLedgerSeq; + }; + /** + * Get a list of accounts, other than the invoker of the simulation, that + * need to sign auth entries in this transaction. + * + * Soroban allows multiple people to sign a transaction. Someone needs to + * sign the final transaction envelope; this person/account is called the + * _invoker_, or _source_. Other accounts might need to sign individual auth + * entries in the transaction, if they're not also the invoker. + * + * This function returns a list of accounts that need to sign auth entries, + * assuming that the same invoker/source account will sign the final + * transaction envelope as signed the initial simulation. + * + * One at a time, for each public key in this array, you will need to + * serialize this transaction with `toJSON`, send to the owner of that key, + * deserialize the transaction with `txFromJson`, and call + * {@link signAuthEntries}. Then re-serialize and send to the next account + * in this list. + */ + needsNonInvokerSigningBy = async ({ includeAlreadySigned = false, } = {}) => { + if (!this.raw) { + throw new Error('Transaction has not yet been simulated'); + } + // We expect that any transaction constructed by these libraries has a + // single operation, which is an InvokeHostFunction operation. The host + // function being invoked is the contract method call. + if (!("operations" in this.raw)) { + throw new Error(`Unexpected Transaction type; no operations: ${JSON.stringify(this.raw)}`); + } + const rawInvokeHostFunctionOp = this.raw + .operations[0]; + return [...new Set((rawInvokeHostFunctionOp.auth ?? 
[]).filter(entry => entry.credentials().switch() === + xdr.SorobanCredentialsType.sorobanCredentialsAddress() && + (includeAlreadySigned || + entry.credentials().address().signature().switch().name === 'scvVoid')).map(entry => StrKey.encodeEd25519PublicKey(entry.credentials().address().address().accountId().ed25519())))]; + }; + preImageFor(entry, signatureExpirationLedger) { + const addrAuth = entry.credentials().address(); + return xdr.HashIdPreimage.envelopeTypeSorobanAuthorization(new xdr.HashIdPreimageSorobanAuthorization({ + networkId: hash(Buffer.from(this.options.networkPassphrase)), + nonce: addrAuth.nonce(), + invocation: entry.rootInvocation(), + signatureExpirationLedger, + })); + } + /** + * If {@link needsNonInvokerSigningBy} returns a non-empty list, you can serialize + * the transaction with `toJSON`, send it to the owner of one of the public keys + * in the map, deserialize with `txFromJSON`, and call this method on their + * machine. Internally, this will use `signAuthEntry` function from connected + * `wallet` for each. + * + * Then, re-serialize the transaction and either send to the next + * `needsNonInvokerSigningBy` owner, or send it back to the original account + * who simulated the transaction so they can {@link sign} the transaction + * envelope and {@link send} it to the network. + * + * Sending to all `needsNonInvokerSigningBy` owners in parallel is not currently + * supported! + */ + signAuthEntries = async ( + /** + * When to set each auth entry to expire. Could be any number of blocks in + * the future. Can be supplied as a promise or a raw number. Default: + * contract's current `persistent` storage expiration date/ledger + * number/block. + */ + expiration = this.getStorageExpiration()) => { + if (!this.raw) + throw new Error('Transaction has not yet been assembled or simulated'); + const needsNonInvokerSigningBy = await this.needsNonInvokerSigningBy(); + if (!needsNonInvokerSigningBy) + throw new NoUnsignedNonInvokerAuthEntriesError('No unsigned non-invoker auth entries; maybe you already signed?'); + const publicKey = await this.getPublicKey(); + if (!publicKey) + throw new Error('Could not get public key from wallet; maybe Freighter is not signed in?'); + if (needsNonInvokerSigningBy.indexOf(publicKey) === -1) + throw new Error(`No auth entries for public key "${publicKey}"`); + const wallet = await this.getWallet(); + const rawInvokeHostFunctionOp = this.raw + .operations[0]; + const authEntries = rawInvokeHostFunctionOp.auth ?? []; + for (const [i, entry] of authEntries.entries()) { + if (entry.credentials().switch() !== + xdr.SorobanCredentialsType.sorobanCredentialsAddress()) { + // if the invoker/source account, then the entry doesn't need explicit + // signature, since the tx envelope is already signed by the source + // account, so only check for sorobanCredentialsAddress + continue; + } + const pk = StrKey.encodeEd25519PublicKey(entry.credentials().address().address().accountId().ed25519()); + // this auth entry needs to be signed by a different account + // (or maybe already was!) 
+ if (pk !== publicKey) + continue; + authEntries[i] = await authorizeEntry(entry, async (preimage) => Buffer.from(await wallet.signAuthEntry(preimage.toXDR('base64')), 'base64'), await expiration, this.options.networkPassphrase); + } + }; + get isReadCall() { + const authsCount = this.simulationData.result.auth.length; + const writeLength = this.simulationData.transactionData.resources().footprint().readWrite().length; + return (authsCount === 0) && (writeLength === 0); + } + hasRealInvoker = async () => { + const account = await this.getAccount(); + return account.accountId() !== NULL_ACCOUNT; + }; +} +/** + * A transaction that has been sent to the Soroban network. This happens in two steps: + * + * 1. `sendTransaction`: initial submission of the transaction to the network. + * This step can run into problems, and will be retried with exponential + * backoff if it does. See all attempts in `sendTransactionResponseAll` and the + * most recent attempt in `sendTransactionResponse`. + * 2. `getTransaction`: once the transaction has been submitted to the network + * successfully, you need to wait for it to finalize to get the results of the + * transaction. This step can also run into problems, and will be retried with + * exponential backoff if it does. See all attempts in + * `getTransactionResponseAll` and the most recent attempt in + * `getTransactionResponse`. + */ +class SentTransaction { + options; + assembled; + server; + signed; + sendTransactionResponse; + sendTransactionResponseAll; + getTransactionResponse; + getTransactionResponseAll; + constructor(options, assembled) { + this.options = options; + this.assembled = assembled; + this.server = new SorobanRpc.Server(this.options.rpcUrl, { + allowHttp: this.options.rpcUrl.startsWith("http://"), + }); + this.assembled = assembled; + } + static init = async (options, assembled, secondsToWait = 10) => { + const tx = new SentTransaction(options, assembled); + return await tx.send(secondsToWait); + }; + send = async (secondsToWait = 10) => { + const wallet = await this.assembled.getWallet(); + this.sendTransactionResponseAll = await withExponentialBackoff(async (previousFailure) => { + if (previousFailure) { + // Increment transaction sequence number and resimulate before trying again + // Soroban transaction can only have 1 operation + const op = this.assembled.raw.operations[0]; + this.assembled.raw = new TransactionBuilder(await this.assembled.getAccount(), { + fee: this.assembled.raw.fee, + networkPassphrase: this.options.networkPassphrase, + }) + .setTimeout(TimeoutInfinite) + .addOperation(Operation.invokeHostFunction({ ...op, auth: op.auth ?? [] })) + .build(); + await this.assembled.simulate(); + } + const signature = await wallet.signTransaction(this.assembled.raw.toXDR(), { + networkPassphrase: this.options.networkPassphrase, + }); + this.signed = TransactionBuilder.fromXDR(signature, this.options.networkPassphrase); + return this.server.sendTransaction(this.signed); + }, resp => resp.status !== "PENDING", secondsToWait); + this.sendTransactionResponse = this.sendTransactionResponseAll[this.sendTransactionResponseAll.length - 1]; + if (this.sendTransactionResponse.status !== "PENDING") { + throw new Error(`Tried to resubmit transaction for ${secondsToWait} seconds, but it's still failing. 
` + + `All attempts: ${JSON.stringify(this.sendTransactionResponseAll, null, 2)}`); + } + const { hash } = this.sendTransactionResponse; + this.getTransactionResponseAll = await withExponentialBackoff(() => this.server.getTransaction(hash), resp => resp.status === SorobanRpc.Api.GetTransactionStatus.NOT_FOUND, secondsToWait); + this.getTransactionResponse = this.getTransactionResponseAll[this.getTransactionResponseAll.length - 1]; + if (this.getTransactionResponse.status === SorobanRpc.Api.GetTransactionStatus.NOT_FOUND) { + console.error(`Waited ${secondsToWait} seconds for transaction to complete, but it did not. ` + + `Returning anyway. Check the transaction status manually. ` + + `Sent transaction: ${JSON.stringify(this.sendTransactionResponse, null, 2)}\n` + + `All attempts to get the result: ${JSON.stringify(this.getTransactionResponseAll, null, 2)}`); + } + return this; + }; + get result() { + // 1. check if transaction was submitted and awaited with `getTransaction` + if ("getTransactionResponse" in this && + this.getTransactionResponse) { + // getTransactionResponse has a `returnValue` field unless it failed + if ("returnValue" in this.getTransactionResponse) { + return this.options.parseResultXdr(this.getTransactionResponse.returnValue); + } + // if "returnValue" not present, the transaction failed; return without parsing the result + throw new Error("Transaction failed! Cannot parse result."); + } + // 2. otherwise, maybe it was merely sent with `sendTransaction` + if (this.sendTransactionResponse) { + const errorResult = this.sendTransactionResponse.errorResult?.result(); + if (errorResult) { + throw new SendFailedError(`Transaction simulation looked correct, but attempting to send the transaction failed. Check \`simulation\` and \`sendTransactionResponseAll\` to troubleshoot. Decoded \`sendTransactionResponse.errorResultXdr\`: ${errorResult}`); + } + throw new SendResultOnlyError(`Transaction was sent to the network, but not yet awaited. No result to show. Await transaction completion with \`getTransaction(sendTransactionResponse.hash)\``); + } + // 3. finally, if neither of those are present, throw an error + throw new Error(`Sending transaction failed: ${JSON.stringify(this.assembled)}`); + } +} +/** + * Keep calling a `fn` for `secondsToWait` seconds, if `keepWaitingIf` is true. + * Returns an array of all attempts to call the function. 
+ */ +async function withExponentialBackoff(fn, keepWaitingIf, secondsToWait, exponentialFactor = 1.5, verbose = false) { + const attempts = []; + let count = 0; + attempts.push(await fn()); + if (!keepWaitingIf(attempts[attempts.length - 1])) + return attempts; + const waitUntil = new Date(Date.now() + secondsToWait * 1000).valueOf(); + let waitTime = 1000; + let totalWaitTime = waitTime; + while (Date.now() < waitUntil && keepWaitingIf(attempts[attempts.length - 1])) { + count++; + // Wait a beat + if (verbose) { + console.info(`Waiting ${waitTime}ms before trying again (bringing the total wait time to ${totalWaitTime}ms so far, of total ${secondsToWait * 1000}ms)`); + } + await new Promise(res => setTimeout(res, waitTime)); + // Exponential backoff + waitTime = waitTime * exponentialFactor; + if (new Date(Date.now() + waitTime).valueOf() > waitUntil) { + waitTime = waitUntil - Date.now(); + if (verbose) { + console.info(`was gonna wait too long; new waitTime: ${waitTime}ms`); + } + } + totalWaitTime = waitTime + totalWaitTime; + // Try again + attempts.push(await fn(attempts[attempts.length - 1])); + if (verbose && keepWaitingIf(attempts[attempts.length - 1])) { + console.info(`${count}. Called ${fn}; ${attempts.length} prev attempts. Most recent: ${JSON.stringify(attempts[attempts.length - 1], null, 2)}`); + } + } + return attempts; +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/index.d.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/index.d.ts new file mode 100644 index 00000000..66d3d595 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/index.d.ts @@ -0,0 +1,424 @@ +import { ContractSpec } from '@stellar/stellar-sdk'; +import { Buffer } from "buffer"; +import { AssembledTransaction, Ok, Err } from './assembled-tx.js'; +import type { u32, i32, i64, i128, Option, Error_ } from './assembled-tx.js'; +import type { ClassOptions } from './method-options.js'; +export * from './assembled-tx.js'; +export * from './method-options.js'; +export declare const networks: { + readonly futurenet: { + readonly networkPassphrase: "Test SDF Future Network ; October 2022"; + readonly contractId: "CBYMYMSDF6FBDNCFJCRC7KMO4REYFPOH2U4N7FXI3GJO6YXNCQ43CDSK"; + }; +}; +/** + This is from the rust doc above the struct Test + */ +export interface Test { + /** + + */ + a: u32; + /** + + */ + b: boolean; + /** + + */ + c: string; +} +/** + + */ +export type SimpleEnum = { + tag: "First"; + values: void; +} | { + tag: "Second"; + values: void; +} | { + tag: "Third"; + values: void; +}; +/** + + */ +export declare enum RoyalCard { + Jack = 11, + Queen = 12, + King = 13 +} +/** + + */ +export type TupleStruct = readonly [Test, SimpleEnum]; +/** + + */ +export type ComplexEnum = { + tag: "Struct"; + values: readonly [Test]; +} | { + tag: "Tuple"; + values: readonly [TupleStruct]; +} | { + tag: "Enum"; + values: readonly [SimpleEnum]; +} | { + tag: "Asset"; + values: readonly [string, i128]; +} | { + tag: "Void"; + values: void; +}; +/** + + */ +export declare const Errors: { + 1: { + message: string; + }; +}; +export declare class Contract { + readonly options: ClassOptions; + spec: ContractSpec; + constructor(options: ClassOptions); + private readonly parsers; + private txFromJSON; + readonly fromJSON: { + hello: (json: string) => AssembledTransaction; + woid: (json: string) => AssembledTransaction; + val: (json: string) => AssembledTransaction; + u32FailOnEven: (json: string) => AssembledTransaction | Ok>; + 
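A minimal usage sketch of the generated bindings above (illustrative only, not part of the generated fixture): it assumes the fixture package is importable as `test_custom_types` and that the caller supplies a Soroban RPC URL.

```ts
// Hypothetical consumer of the generated client; the import specifier and
// RPC URL are assumptions, not part of this patch.
import { Contract, networks } from "test_custom_types";

const contract = new Contract({
  ...networks.futurenet,                       // contractId + networkPassphrase
  rpcUrl: "https://rpc-futurenet.stellar.org", // assumed RPC endpoint
});

// Every generated method simulates first and returns an AssembledTransaction;
// `result` is the decoded return value of that simulation.
const tx = await contract.hello({ hello: "world" });
console.log(tx.result);

// Read calls stop there. State-changing calls are then signed (Freighter by
// default) and submitted; retries are handled by SentTransaction.
if (!tx.isReadCall) {
  const sent = await tx.signAndSend();
  console.log(sent.getTransactionResponse?.status);
}
```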
u32: (json: string) => AssembledTransaction; + i32: (json: string) => AssembledTransaction; + i64: (json: string) => AssembledTransaction; + struktHel: (json: string) => AssembledTransaction; + strukt: (json: string) => AssembledTransaction; + simple: (json: string) => AssembledTransaction; + complex: (json: string) => AssembledTransaction; + addresse: (json: string) => AssembledTransaction; + bytes: (json: string) => AssembledTransaction; + bytesN: (json: string) => AssembledTransaction; + card: (json: string) => AssembledTransaction; + boolean: (json: string) => AssembledTransaction; + not: (json: string) => AssembledTransaction; + i128: (json: string) => AssembledTransaction; + u128: (json: string) => AssembledTransaction; + multiArgs: (json: string) => AssembledTransaction; + map: (json: string) => AssembledTransaction>; + vec: (json: string) => AssembledTransaction; + tuple: (json: string) => AssembledTransaction; + option: (json: string) => AssembledTransaction>; + u256: (json: string) => AssembledTransaction; + i256: (json: string) => AssembledTransaction; + string: (json: string) => AssembledTransaction; + tupleStrukt: (json: string) => AssembledTransaction; + }; + /** +* Construct and simulate a hello transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + hello: ({ hello }: { + hello: string; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a woid transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + woid: (options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a val transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + val: (options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a u32_fail_on_even transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32FailOnEven: ({ u32_ }: { + u32_: u32; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise | Ok>>; + /** +* Construct and simulate a u32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32: ({ u32_ }: { + u32_: u32; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. 
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i32: ({ i32_ }: { + i32_: i32; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i64_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i64: ({ i64_ }: { + i64_: i64; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a strukt_hel transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example contract method which takes a struct +*/ + struktHel: ({ strukt }: { + strukt: Test; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + strukt: ({ strukt }: { + strukt: Test; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a simple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + simple: ({ simple }: { + simple: SimpleEnum; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a complex transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + complex: ({ complex }: { + complex: ComplexEnum; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a addresse transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + addresse: ({ addresse }: { + addresse: string; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a bytes transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytes: ({ bytes }: { + bytes: Buffer; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a bytes_n transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. 
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytesN: ({ bytes_n }: { + bytes_n: Buffer; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a card transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + card: ({ card }: { + card: RoyalCard; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a boolean transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + boolean: ({ boolean }: { + boolean: boolean; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a not transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Negates a boolean value +*/ + not: ({ boolean }: { + boolean: boolean; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i128: ({ i128 }: { + i128: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a u128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u128: ({ u128 }: { + u128: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a multi_args transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + multiArgs: ({ a, b }: { + a: u32; + b: boolean; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a map transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + map: ({ map }: { + map: Map; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>>; + /** +* Construct and simulate a vec transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. 
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + vec: ({ vec }: { + vec: Array; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a tuple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + tuple: ({ tuple }: { + tuple: readonly [string, u32]; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a option transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example of an optional argument +*/ + option: ({ option }: { + option: Option; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>>; + /** +* Construct and simulate a u256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u256: ({ u256 }: { + u256: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i256: ({ i256 }: { + i256: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a string transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + string: ({ string }: { + string: string; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a tuple_strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + tupleStrukt: ({ tuple_strukt }: { + tuple_strukt: TupleStruct; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. 
+ */ + fee?: number; + }) => Promise>; +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/index.js b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/index.js new file mode 100644 index 00000000..b97cff55 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/index.js @@ -0,0 +1,509 @@ +import { ContractSpec, Address } from '@stellar/stellar-sdk'; +import { Buffer } from "buffer"; +import { AssembledTransaction, Ok, Err } from './assembled-tx.js'; +export * from './assembled-tx.js'; +export * from './method-options.js'; +if (typeof window !== 'undefined') { + //@ts-ignore Buffer exists + window.Buffer = window.Buffer || Buffer; +} +export const networks = { + futurenet: { + networkPassphrase: "Test SDF Future Network ; October 2022", + contractId: "CBYMYMSDF6FBDNCFJCRC7KMO4REYFPOH2U4N7FXI3GJO6YXNCQ43CDSK", + } +}; +/** + + */ +export var RoyalCard; +(function (RoyalCard) { + RoyalCard[RoyalCard["Jack"] = 11] = "Jack"; + RoyalCard[RoyalCard["Queen"] = 12] = "Queen"; + RoyalCard[RoyalCard["King"] = 13] = "King"; +})(RoyalCard || (RoyalCard = {})); +/** + + */ +export const Errors = { + 1: { message: "Please provide an odd number" } +}; +export class Contract { + options; + spec; + constructor(options) { + this.options = options; + this.spec = new ContractSpec([ + "AAAAAQAAAC9UaGlzIGlzIGZyb20gdGhlIHJ1c3QgZG9jIGFib3ZlIHRoZSBzdHJ1Y3QgVGVzdAAAAAAAAAAABFRlc3QAAAADAAAAAAAAAAFhAAAAAAAABAAAAAAAAAABYgAAAAAAAAEAAAAAAAAAAWMAAAAAAAAR", + "AAAAAgAAAAAAAAAAAAAAClNpbXBsZUVudW0AAAAAAAMAAAAAAAAAAAAAAAVGaXJzdAAAAAAAAAAAAAAAAAAABlNlY29uZAAAAAAAAAAAAAAAAAAFVGhpcmQAAAA=", + "AAAAAwAAAAAAAAAAAAAACVJveWFsQ2FyZAAAAAAAAAMAAAAAAAAABEphY2sAAAALAAAAAAAAAAVRdWVlbgAAAAAAAAwAAAAAAAAABEtpbmcAAAAN", + "AAAAAQAAAAAAAAAAAAAAC1R1cGxlU3RydWN0AAAAAAIAAAAAAAAAATAAAAAAAAfQAAAABFRlc3QAAAAAAAAAATEAAAAAAAfQAAAAClNpbXBsZUVudW0AAA==", + "AAAAAgAAAAAAAAAAAAAAC0NvbXBsZXhFbnVtAAAAAAUAAAABAAAAAAAAAAZTdHJ1Y3QAAAAAAAEAAAfQAAAABFRlc3QAAAABAAAAAAAAAAVUdXBsZQAAAAAAAAEAAAfQAAAAC1R1cGxlU3RydWN0AAAAAAEAAAAAAAAABEVudW0AAAABAAAH0AAAAApTaW1wbGVFbnVtAAAAAAABAAAAAAAAAAVBc3NldAAAAAAAAAIAAAATAAAACwAAAAAAAAAAAAAABFZvaWQ=", + "AAAABAAAAAAAAAAAAAAABUVycm9yAAAAAAAAAQAAABxQbGVhc2UgcHJvdmlkZSBhbiBvZGQgbnVtYmVyAAAAD051bWJlck11c3RCZU9kZAAAAAAB", + "AAAAAAAAAAAAAAAFaGVsbG8AAAAAAAABAAAAAAAAAAVoZWxsbwAAAAAAABEAAAABAAAAEQ==", + "AAAAAAAAAAAAAAAEd29pZAAAAAAAAAAA", + "AAAAAAAAAAAAAAADdmFsAAAAAAAAAAABAAAAAA==", + "AAAAAAAAAAAAAAAQdTMyX2ZhaWxfb25fZXZlbgAAAAEAAAAAAAAABHUzMl8AAAAEAAAAAQAAA+kAAAAEAAAAAw==", + "AAAAAAAAAAAAAAAEdTMyXwAAAAEAAAAAAAAABHUzMl8AAAAEAAAAAQAAAAQ=", + "AAAAAAAAAAAAAAAEaTMyXwAAAAEAAAAAAAAABGkzMl8AAAAFAAAAAQAAAAU=", + "AAAAAAAAAAAAAAAEaTY0XwAAAAEAAAAAAAAABGk2NF8AAAAHAAAAAQAAAAc=", + "AAAAAAAAACxFeGFtcGxlIGNvbnRyYWN0IG1ldGhvZCB3aGljaCB0YWtlcyBhIHN0cnVjdAAAAApzdHJ1a3RfaGVsAAAAAAABAAAAAAAAAAZzdHJ1a3QAAAAAB9AAAAAEVGVzdAAAAAEAAAPqAAAAEQ==", + "AAAAAAAAAAAAAAAGc3RydWt0AAAAAAABAAAAAAAAAAZzdHJ1a3QAAAAAB9AAAAAEVGVzdAAAAAEAAAfQAAAABFRlc3Q=", + "AAAAAAAAAAAAAAAGc2ltcGxlAAAAAAABAAAAAAAAAAZzaW1wbGUAAAAAB9AAAAAKU2ltcGxlRW51bQAAAAAAAQAAB9AAAAAKU2ltcGxlRW51bQAA", + "AAAAAAAAAAAAAAAHY29tcGxleAAAAAABAAAAAAAAAAdjb21wbGV4AAAAB9AAAAALQ29tcGxleEVudW0AAAAAAQAAB9AAAAALQ29tcGxleEVudW0A", + "AAAAAAAAAAAAAAAIYWRkcmVzc2UAAAABAAAAAAAAAAhhZGRyZXNzZQAAABMAAAABAAAAEw==", + "AAAAAAAAAAAAAAAFYnl0ZXMAAAAAAAABAAAAAAAAAAVieXRlcwAAAAAAAA4AAAABAAAADg==", + "AAAAAAAAAAAAAAAHYnl0ZXNfbgAAAAABAAAAAAAAAAdieXRlc19uAAAAA+4AAAAJAAAAAQAAA+4AAAAJ", + 
"AAAAAAAAAAAAAAAEY2FyZAAAAAEAAAAAAAAABGNhcmQAAAfQAAAACVJveWFsQ2FyZAAAAAAAAAEAAAfQAAAACVJveWFsQ2FyZAAAAA==", + "AAAAAAAAAAAAAAAHYm9vbGVhbgAAAAABAAAAAAAAAAdib29sZWFuAAAAAAEAAAABAAAAAQ==", + "AAAAAAAAABdOZWdhdGVzIGEgYm9vbGVhbiB2YWx1ZQAAAAADbm90AAAAAAEAAAAAAAAAB2Jvb2xlYW4AAAAAAQAAAAEAAAAB", + "AAAAAAAAAAAAAAAEaTEyOAAAAAEAAAAAAAAABGkxMjgAAAALAAAAAQAAAAs=", + "AAAAAAAAAAAAAAAEdTEyOAAAAAEAAAAAAAAABHUxMjgAAAAKAAAAAQAAAAo=", + "AAAAAAAAAAAAAAAKbXVsdGlfYXJncwAAAAAAAgAAAAAAAAABYQAAAAAAAAQAAAAAAAAAAWIAAAAAAAABAAAAAQAAAAQ=", + "AAAAAAAAAAAAAAADbWFwAAAAAAEAAAAAAAAAA21hcAAAAAPsAAAABAAAAAEAAAABAAAD7AAAAAQAAAAB", + "AAAAAAAAAAAAAAADdmVjAAAAAAEAAAAAAAAAA3ZlYwAAAAPqAAAABAAAAAEAAAPqAAAABA==", + "AAAAAAAAAAAAAAAFdHVwbGUAAAAAAAABAAAAAAAAAAV0dXBsZQAAAAAAA+0AAAACAAAAEQAAAAQAAAABAAAD7QAAAAIAAAARAAAABA==", + "AAAAAAAAAB9FeGFtcGxlIG9mIGFuIG9wdGlvbmFsIGFyZ3VtZW50AAAAAAZvcHRpb24AAAAAAAEAAAAAAAAABm9wdGlvbgAAAAAD6AAAAAQAAAABAAAD6AAAAAQ=", + "AAAAAAAAAAAAAAAEdTI1NgAAAAEAAAAAAAAABHUyNTYAAAAMAAAAAQAAAAw=", + "AAAAAAAAAAAAAAAEaTI1NgAAAAEAAAAAAAAABGkyNTYAAAANAAAAAQAAAA0=", + "AAAAAAAAAAAAAAAGc3RyaW5nAAAAAAABAAAAAAAAAAZzdHJpbmcAAAAAABAAAAABAAAAEA==", + "AAAAAAAAAAAAAAAMdHVwbGVfc3RydWt0AAAAAQAAAAAAAAAMdHVwbGVfc3RydWt0AAAH0AAAAAtUdXBsZVN0cnVjdAAAAAABAAAH0AAAAAtUdXBsZVN0cnVjdAA=" + ]); + } + parsers = { + hello: (result) => this.spec.funcResToNative("hello", result), + woid: () => { }, + val: (result) => this.spec.funcResToNative("val", result), + u32FailOnEven: (result) => { + if (result instanceof Err) + return result; + return new Ok(this.spec.funcResToNative("u32_fail_on_even", result)); + }, + u32: (result) => this.spec.funcResToNative("u32_", result), + i32: (result) => this.spec.funcResToNative("i32_", result), + i64: (result) => this.spec.funcResToNative("i64_", result), + struktHel: (result) => this.spec.funcResToNative("strukt_hel", result), + strukt: (result) => this.spec.funcResToNative("strukt", result), + simple: (result) => this.spec.funcResToNative("simple", result), + complex: (result) => this.spec.funcResToNative("complex", result), + addresse: (result) => this.spec.funcResToNative("addresse", result), + bytes: (result) => this.spec.funcResToNative("bytes", result), + bytesN: (result) => this.spec.funcResToNative("bytes_n", result), + card: (result) => this.spec.funcResToNative("card", result), + boolean: (result) => this.spec.funcResToNative("boolean", result), + not: (result) => this.spec.funcResToNative("not", result), + i128: (result) => this.spec.funcResToNative("i128", result), + u128: (result) => this.spec.funcResToNative("u128", result), + multiArgs: (result) => this.spec.funcResToNative("multi_args", result), + map: (result) => this.spec.funcResToNative("map", result), + vec: (result) => this.spec.funcResToNative("vec", result), + tuple: (result) => this.spec.funcResToNative("tuple", result), + option: (result) => this.spec.funcResToNative("option", result), + u256: (result) => this.spec.funcResToNative("u256", result), + i256: (result) => this.spec.funcResToNative("i256", result), + string: (result) => this.spec.funcResToNative("string", result), + tupleStrukt: (result) => this.spec.funcResToNative("tuple_strukt", result) + }; + txFromJSON = (json) => { + const { method, ...tx } = JSON.parse(json); + return AssembledTransaction.fromJSON({ + ...this.options, + method, + parseResultXdr: this.parsers[method], + }, tx); + }; + fromJSON = { + hello: (this.txFromJSON), + woid: (this.txFromJSON), + val: (this.txFromJSON), + u32FailOnEven: (this.txFromJSON), + u32: (this.txFromJSON), + i32: (this.txFromJSON), 
+ i64: (this.txFromJSON), + struktHel: (this.txFromJSON), + strukt: (this.txFromJSON), + simple: (this.txFromJSON), + complex: (this.txFromJSON), + addresse: (this.txFromJSON), + bytes: (this.txFromJSON), + bytesN: (this.txFromJSON), + card: (this.txFromJSON), + boolean: (this.txFromJSON), + not: (this.txFromJSON), + i128: (this.txFromJSON), + u128: (this.txFromJSON), + multiArgs: (this.txFromJSON), + map: (this.txFromJSON), + vec: (this.txFromJSON), + tuple: (this.txFromJSON), + option: (this.txFromJSON), + u256: (this.txFromJSON), + i256: (this.txFromJSON), + string: (this.txFromJSON), + tupleStrukt: (this.txFromJSON) + }; + /** +* Construct and simulate a hello transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + hello = async ({ hello }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'hello', + args: this.spec.funcArgsToScVals("hello", { hello }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['hello'], + }); + }; + /** +* Construct and simulate a woid transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + woid = async (options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'woid', + args: this.spec.funcArgsToScVals("woid", {}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['woid'], + }); + }; + /** +* Construct and simulate a val transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + val = async (options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'val', + args: this.spec.funcArgsToScVals("val", {}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['val'], + }); + }; + /** +* Construct and simulate a u32_fail_on_even transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32FailOnEven = async ({ u32_ }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'u32_fail_on_even', + args: this.spec.funcArgsToScVals("u32_fail_on_even", { u32_ }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['u32FailOnEven'], + }); + }; + /** +* Construct and simulate a u32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32 = async ({ u32_ }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'u32_', + args: this.spec.funcArgsToScVals("u32_", { u32_ }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['u32'], + }); + }; + /** +* Construct and simulate a i32_ transaction. 
Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i32 = async ({ i32_ }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'i32_', + args: this.spec.funcArgsToScVals("i32_", { i32_ }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['i32'], + }); + }; + /** +* Construct and simulate a i64_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i64 = async ({ i64_ }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'i64_', + args: this.spec.funcArgsToScVals("i64_", { i64_ }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['i64'], + }); + }; + /** +* Construct and simulate a strukt_hel transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example contract method which takes a struct +*/ + struktHel = async ({ strukt }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'strukt_hel', + args: this.spec.funcArgsToScVals("strukt_hel", { strukt }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['struktHel'], + }); + }; + /** +* Construct and simulate a strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + strukt = async ({ strukt }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'strukt', + args: this.spec.funcArgsToScVals("strukt", { strukt }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['strukt'], + }); + }; + /** +* Construct and simulate a simple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + simple = async ({ simple }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'simple', + args: this.spec.funcArgsToScVals("simple", { simple }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['simple'], + }); + }; + /** +* Construct and simulate a complex transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + complex = async ({ complex }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'complex', + args: this.spec.funcArgsToScVals("complex", { complex }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['complex'], + }); + }; + /** +* Construct and simulate a addresse transaction. 
Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + addresse = async ({ addresse }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'addresse', + args: this.spec.funcArgsToScVals("addresse", { addresse: new Address(addresse) }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['addresse'], + }); + }; + /** +* Construct and simulate a bytes transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytes = async ({ bytes }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'bytes', + args: this.spec.funcArgsToScVals("bytes", { bytes }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['bytes'], + }); + }; + /** +* Construct and simulate a bytes_n transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytesN = async ({ bytes_n }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'bytes_n', + args: this.spec.funcArgsToScVals("bytes_n", { bytes_n }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['bytesN'], + }); + }; + /** +* Construct and simulate a card transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + card = async ({ card }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'card', + args: this.spec.funcArgsToScVals("card", { card }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['card'], + }); + }; + /** +* Construct and simulate a boolean transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + boolean = async ({ boolean }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'boolean', + args: this.spec.funcArgsToScVals("boolean", { boolean }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['boolean'], + }); + }; + /** +* Construct and simulate a not transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Negates a boolean value +*/ + not = async ({ boolean }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'not', + args: this.spec.funcArgsToScVals("not", { boolean }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['not'], + }); + }; + /** +* Construct and simulate a i128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. 
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i128 = async ({ i128 }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'i128', + args: this.spec.funcArgsToScVals("i128", { i128 }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['i128'], + }); + }; + /** +* Construct and simulate a u128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u128 = async ({ u128 }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'u128', + args: this.spec.funcArgsToScVals("u128", { u128 }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['u128'], + }); + }; + /** +* Construct and simulate a multi_args transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + multiArgs = async ({ a, b }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'multi_args', + args: this.spec.funcArgsToScVals("multi_args", { a, b }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['multiArgs'], + }); + }; + /** +* Construct and simulate a map transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + map = async ({ map }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'map', + args: this.spec.funcArgsToScVals("map", { map }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['map'], + }); + }; + /** +* Construct and simulate a vec transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + vec = async ({ vec }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'vec', + args: this.spec.funcArgsToScVals("vec", { vec }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['vec'], + }); + }; + /** +* Construct and simulate a tuple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + tuple = async ({ tuple }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'tuple', + args: this.spec.funcArgsToScVals("tuple", { tuple }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['tuple'], + }); + }; + /** +* Construct and simulate a option transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. 
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example of an optional argument +*/ + option = async ({ option }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'option', + args: this.spec.funcArgsToScVals("option", { option }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['option'], + }); + }; + /** +* Construct and simulate a u256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u256 = async ({ u256 }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'u256', + args: this.spec.funcArgsToScVals("u256", { u256 }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['u256'], + }); + }; + /** +* Construct and simulate a i256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i256 = async ({ i256 }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'i256', + args: this.spec.funcArgsToScVals("i256", { i256 }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['i256'], + }); + }; + /** +* Construct and simulate a string transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + string = async ({ string }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'string', + args: this.spec.funcArgsToScVals("string", { string }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['string'], + }); + }; + /** +* Construct and simulate a tuple_strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. 
+*/ + tupleStrukt = async ({ tuple_strukt }, options = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'tuple_strukt', + args: this.spec.funcArgsToScVals("tuple_strukt", { tuple_strukt }), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['tupleStrukt'], + }); + }; +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/method-options.d.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/method-options.d.ts new file mode 100644 index 00000000..fc6b21d5 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/method-options.d.ts @@ -0,0 +1,47 @@ +declare let responseTypes: 'simulated' | 'full' | undefined; +export type ResponseTypes = typeof responseTypes; +export type XDR_BASE64 = string; +export interface Wallet { + isConnected: () => Promise; + isAllowed: () => Promise; + getUserInfo: () => Promise<{ + publicKey?: string; + }>; + signTransaction: (tx: XDR_BASE64, opts?: { + network?: string; + networkPassphrase?: string; + accountToSign?: string; + }) => Promise; + signAuthEntry: (entryXdr: XDR_BASE64, opts?: { + accountToSign?: string; + }) => Promise; +} +export type ClassOptions = { + contractId: string; + networkPassphrase: string; + rpcUrl: string; + errorTypes?: Record; + /** + * A Wallet interface, such as Freighter, that has the methods `isConnected`, `isAllowed`, `getUserInfo`, and `signTransaction`. If not provided, will attempt to import and use Freighter. Example: + * + * @example + * ```ts + * import freighter from "@stellar/freighter-api"; + * import { Contract } from "test_custom_types"; + * const contract = new Contract({ + * …, + * wallet: freighter, + * }) + * ``` + */ + wallet?: Wallet; +}; +export type MethodOptions = { + /** + * The fee to pay for the transaction. 
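Methods whose contract counterparts return a `Result` surface failures through the `Ok`/`Err` wrappers and the `Errors` table rather than by throwing. A small hedged sketch against `u32_fail_on_even`, again assuming the `contract` instance from the first sketch:

```ts
// Sketch only: inputs are arbitrary; `contract` is assumed from the first sketch.
const ok = await contract.u32FailOnEven({ u32_: 1 });
if (ok.result.isOk()) {
  console.log("odd input accepted:", ok.result.unwrap());
}

const err = await contract.u32FailOnEven({ u32_: 2 });
if (err.result.isErr()) {
  // Error(Contract, #1) from the simulation is matched by contractErrorPattern
  // and looked up in Errors: { message: "Please provide an odd number" }
  console.log(err.result.unwrapErr().message);
}
```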
Default: soroban-sdk's BASE_FEE ('100') + */ + fee?: number; +}; +export {}; diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/method-options.js b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/method-options.js new file mode 100644 index 00000000..00ad9d3c --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/method-options.js @@ -0,0 +1,3 @@ +// defined this way so typeahead shows full union, not named alias +let responseTypes; +export {}; diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/package.json b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/package.json new file mode 100644 index 00000000..1632c2c4 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/esm/package.json @@ -0,0 +1 @@ +{"type": "module"} \ No newline at end of file diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/types/assembled-tx.d.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/types/assembled-tx.d.ts new file mode 100644 index 00000000..1d5e6f5e --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/types/assembled-tx.d.ts @@ -0,0 +1,184 @@ +import { Account, Address, Operation, SorobanRpc, xdr } from "@stellar/stellar-sdk"; +import type { Memo, MemoType, Transaction } from "@stellar/stellar-sdk"; +import type { ClassOptions, MethodOptions, Wallet, XDR_BASE64 } from "./method-options.js"; +export type Tx = Transaction, Operation[]>; +export declare class ExpiredStateError extends Error { +} +export declare class NeedsMoreSignaturesError extends Error { +} +export declare class WalletDisconnectedError extends Error { +} +export declare class SendResultOnlyError extends Error { +} +export declare class SendFailedError extends Error { +} +export declare class NoUnsignedNonInvokerAuthEntriesError extends Error { +} +type SendTx = SorobanRpc.Api.SendTransactionResponse; +type GetTx = SorobanRpc.Api.GetTransactionResponse; +export type u32 = number; +export type i32 = number; +export type u64 = bigint; +export type i64 = bigint; +export type u128 = bigint; +export type i128 = bigint; +export type u256 = bigint; +export type i256 = bigint; +export type Option = T | undefined; +export type Typepoint = bigint; +export type Duration = bigint; +export { Address }; +export interface Error_ { + message: string; +} +export interface Result { + unwrap(): T; + unwrapErr(): E; + isOk(): boolean; + isErr(): boolean; +} +export declare class Ok implements Result { + readonly value: T; + constructor(value: T); + unwrapErr(): E; + unwrap(): T; + isOk(): boolean; + isErr(): boolean; +} +export declare class Err implements Result { + readonly error: E; + constructor(error: E); + unwrapErr(): E; + unwrap(): never; + isOk(): boolean; + isErr(): boolean; +} +export declare const contractErrorPattern: RegExp; +type AssembledTransactionOptions = MethodOptions & ClassOptions & { + method: string; + args?: any[]; + parseResultXdr: (xdr: string | xdr.ScVal | Err) => T; +}; +export declare const NULL_ACCOUNT = "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF"; +export declare class AssembledTransaction { + options: AssembledTransactionOptions; + raw: Tx; + private simulation?; + private simulationResult?; + private simulationTransactionData?; + private server; + toJSON(): string; + static fromJSON(options: Omit, 'args'>, { tx, simulationResult, 
simulationTransactionData }: { + tx: XDR_BASE64; + simulationResult: { + auth: XDR_BASE64[]; + retval: XDR_BASE64; + }; + simulationTransactionData: XDR_BASE64; + }): AssembledTransaction; + private constructor(); + static fromSimulation(options: AssembledTransactionOptions): Promise>; + simulate: () => Promise; + get simulationData(): { + result: SorobanRpc.Api.SimulateHostFunctionResult; + transactionData: xdr.SorobanTransactionData; + }; + get result(): T; + parseError(errorMessage: string): Err | undefined; + getWallet: () => Promise; + getPublicKey: () => Promise; + /** + * Get account details from the Soroban network for the publicKey currently + * selected in user's wallet. If not connected to Freighter, use placeholder + * null account. + */ + getAccount: () => Promise; + /** + * Sign the transaction with the `wallet` (default Freighter), then send to + * the network and return a `SentTransaction` that keeps track of all the + * attempts to send and fetch the transaction from the network. + */ + signAndSend: ({ secondsToWait, force }?: { + /** + * Wait `secondsToWait` seconds (default: 10) for both the transaction to SEND successfully (will keep trying if the server returns `TRY_AGAIN_LATER`), as well as for the transaction to COMPLETE (will keep checking if the server returns `PENDING`). + */ + secondsToWait?: number | undefined; + /** + * If `true`, sign and send the transaction even if it is a read call. + */ + force?: boolean | undefined; + }) => Promise>; + getStorageExpiration: () => Promise; + /** + * Get a list of accounts, other than the invoker of the simulation, that + * need to sign auth entries in this transaction. + * + * Soroban allows multiple people to sign a transaction. Someone needs to + * sign the final transaction envelope; this person/account is called the + * _invoker_, or _source_. Other accounts might need to sign individual auth + * entries in the transaction, if they're not also the invoker. + * + * This function returns a list of accounts that need to sign auth entries, + * assuming that the same invoker/source account will sign the final + * transaction envelope as signed the initial simulation. + * + * One at a time, for each public key in this array, you will need to + * serialize this transaction with `toJSON`, send to the owner of that key, + * deserialize the transaction with `txFromJson`, and call + * {@link signAuthEntries}. Then re-serialize and send to the next account + * in this list. + */ + needsNonInvokerSigningBy: ({ includeAlreadySigned, }?: { + /** + * Whether or not to include auth entries that have already been signed. Default: false + */ + includeAlreadySigned?: boolean | undefined; + }) => Promise; + preImageFor(entry: xdr.SorobanAuthorizationEntry, signatureExpirationLedger: number): xdr.HashIdPreimage; + /** + * If {@link needsNonInvokerSigningBy} returns a non-empty list, you can serialize + * the transaction with `toJSON`, send it to the owner of one of the public keys + * in the map, deserialize with `txFromJSON`, and call this method on their + * machine. Internally, this will use `signAuthEntry` function from connected + * `wallet` for each. + * + * Then, re-serialize the transaction and either send to the next + * `needsNonInvokerSigningBy` owner, or send it back to the original account + * who simulated the transaction so they can {@link sign} the transaction + * envelope and {@link send} it to the network. + * + * Sending to all `needsNonInvokerSigningBy` owners in parallel is not currently + * supported! 
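+ *
+ * A rough sketch of that round-trip, with hedged assumptions: `tx` is an
+ * already-simulated AssembledTransaction, `someMethod` is a placeholder for
+ * whichever typed deserializer in `contract.fromJSON` matches the method, and
+ * the transport between machines is up to you:
+ *
+ * @example
+ * ```ts
+ * // machine A (the invoker): find out who else has to sign, then serialize
+ * const others = await tx.needsNonInvokerSigningBy();
+ * const json = tx.toJSON(); // send this string to the owner of others[0]
+ *
+ * // machine B (a non-invoker signer): deserialize, sign, and pass it along
+ * const theirTx = contract.fromJSON.someMethod(json); // "someMethod" is a placeholder
+ * await theirTx.signAuthEntries();
+ * const updatedJson = theirTx.toJSON(); // back to machine A, or on to others[1]
+ * ```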
+ */ + signAuthEntries: (expiration?: number | Promise) => Promise; + get isReadCall(): boolean; + hasRealInvoker: () => Promise; +} +/** + * A transaction that has been sent to the Soroban network. This happens in two steps: + * + * 1. `sendTransaction`: initial submission of the transaction to the network. + * This step can run into problems, and will be retried with exponential + * backoff if it does. See all attempts in `sendTransactionResponseAll` and the + * most recent attempt in `sendTransactionResponse`. + * 2. `getTransaction`: once the transaction has been submitted to the network + * successfully, you need to wait for it to finalize to get the results of the + * transaction. This step can also run into problems, and will be retried with + * exponential backoff if it does. See all attempts in + * `getTransactionResponseAll` and the most recent attempt in + * `getTransactionResponse`. + */ +declare class SentTransaction { + options: AssembledTransactionOptions; + assembled: AssembledTransaction; + server: SorobanRpc.Server; + signed: Tx; + sendTransactionResponse?: SendTx; + sendTransactionResponseAll?: SendTx[]; + getTransactionResponse?: GetTx; + getTransactionResponseAll?: GetTx[]; + constructor(options: AssembledTransactionOptions, assembled: AssembledTransaction); + static init: (options: AssembledTransactionOptions, assembled: AssembledTransaction, secondsToWait?: number) => Promise>; + private send; + get result(): T; +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/types/index.d.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/types/index.d.ts new file mode 100644 index 00000000..66d3d595 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/types/index.d.ts @@ -0,0 +1,424 @@ +import { ContractSpec } from '@stellar/stellar-sdk'; +import { Buffer } from "buffer"; +import { AssembledTransaction, Ok, Err } from './assembled-tx.js'; +import type { u32, i32, i64, i128, Option, Error_ } from './assembled-tx.js'; +import type { ClassOptions } from './method-options.js'; +export * from './assembled-tx.js'; +export * from './method-options.js'; +export declare const networks: { + readonly futurenet: { + readonly networkPassphrase: "Test SDF Future Network ; October 2022"; + readonly contractId: "CBYMYMSDF6FBDNCFJCRC7KMO4REYFPOH2U4N7FXI3GJO6YXNCQ43CDSK"; + }; +}; +/** + This is from the rust doc above the struct Test + */ +export interface Test { + /** + + */ + a: u32; + /** + + */ + b: boolean; + /** + + */ + c: string; +} +/** + + */ +export type SimpleEnum = { + tag: "First"; + values: void; +} | { + tag: "Second"; + values: void; +} | { + tag: "Third"; + values: void; +}; +/** + + */ +export declare enum RoyalCard { + Jack = 11, + Queen = 12, + King = 13 +} +/** + + */ +export type TupleStruct = readonly [Test, SimpleEnum]; +/** + + */ +export type ComplexEnum = { + tag: "Struct"; + values: readonly [Test]; +} | { + tag: "Tuple"; + values: readonly [TupleStruct]; +} | { + tag: "Enum"; + values: readonly [SimpleEnum]; +} | { + tag: "Asset"; + values: readonly [string, i128]; +} | { + tag: "Void"; + values: void; +}; +/** + + */ +export declare const Errors: { + 1: { + message: string; + }; +}; +export declare class Contract { + readonly options: ClassOptions; + spec: ContractSpec; + constructor(options: ClassOptions); + private readonly parsers; + private txFromJSON; + readonly fromJSON: { + hello: (json: string) => AssembledTransaction; + woid: (json: string) => AssembledTransaction; + 
val: (json: string) => AssembledTransaction; + u32FailOnEven: (json: string) => AssembledTransaction | Ok>; + u32: (json: string) => AssembledTransaction; + i32: (json: string) => AssembledTransaction; + i64: (json: string) => AssembledTransaction; + struktHel: (json: string) => AssembledTransaction; + strukt: (json: string) => AssembledTransaction; + simple: (json: string) => AssembledTransaction; + complex: (json: string) => AssembledTransaction; + addresse: (json: string) => AssembledTransaction; + bytes: (json: string) => AssembledTransaction; + bytesN: (json: string) => AssembledTransaction; + card: (json: string) => AssembledTransaction; + boolean: (json: string) => AssembledTransaction; + not: (json: string) => AssembledTransaction; + i128: (json: string) => AssembledTransaction; + u128: (json: string) => AssembledTransaction; + multiArgs: (json: string) => AssembledTransaction; + map: (json: string) => AssembledTransaction>; + vec: (json: string) => AssembledTransaction; + tuple: (json: string) => AssembledTransaction; + option: (json: string) => AssembledTransaction>; + u256: (json: string) => AssembledTransaction; + i256: (json: string) => AssembledTransaction; + string: (json: string) => AssembledTransaction; + tupleStrukt: (json: string) => AssembledTransaction; + }; + /** +* Construct and simulate a hello transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + hello: ({ hello }: { + hello: string; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a woid transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + woid: (options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a val transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + val: (options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a u32_fail_on_even transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32FailOnEven: ({ u32_ }: { + u32_: u32; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise | Ok>>; + /** +* Construct and simulate a u32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u32: ({ u32_ }: { + u32_: u32; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i32_ transaction. 
Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i32: ({ i32_ }: { + i32_: i32; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i64_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i64: ({ i64_ }: { + i64_: i64; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a strukt_hel transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example contract method which takes a struct +*/ + struktHel: ({ strukt }: { + strukt: Test; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + strukt: ({ strukt }: { + strukt: Test; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a simple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + simple: ({ simple }: { + simple: SimpleEnum; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a complex transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + complex: ({ complex }: { + complex: ComplexEnum; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a addresse transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + addresse: ({ addresse }: { + addresse: string; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a bytes transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytes: ({ bytes }: { + bytes: Buffer; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a bytes_n transaction. 
Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + bytesN: ({ bytes_n }: { + bytes_n: Buffer; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a card transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + card: ({ card }: { + card: RoyalCard; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a boolean transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + boolean: ({ boolean }: { + boolean: boolean; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a not transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Negates a boolean value +*/ + not: ({ boolean }: { + boolean: boolean; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i128: ({ i128 }: { + i128: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a u128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u128: ({ u128 }: { + u128: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a multi_args transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + multiArgs: ({ a, b }: { + a: u32; + b: boolean; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a map transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + map: ({ map }: { + map: Map; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>>; + /** +* Construct and simulate a vec transaction. 
Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + vec: ({ vec }: { + vec: Array; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a tuple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + tuple: ({ tuple }: { + tuple: readonly [string, u32]; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a option transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example of an optional argument +*/ + option: ({ option }: { + option: Option; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>>; + /** +* Construct and simulate a u256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + u256: ({ u256 }: { + u256: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a i256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + i256: ({ i256 }: { + i256: bigint; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a string transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + string: ({ string }: { + string: string; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number; + }) => Promise>; + /** +* Construct and simulate a tuple_strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. +*/ + tupleStrukt: ({ tuple_strukt }: { + tuple_strukt: TupleStruct; + }, options?: { + /** + * The fee to pay for the transaction. Default: 100. 
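+ * 
+ * A small usage sketch; the `contract` instance and the argument values below
+ * are assumptions for illustration only:
+ * 
+ * @example
+ * ```ts
+ * const tx = await contract.tupleStrukt(
+ *   // hypothetical values matching the Test and SimpleEnum shapes in this module
+ *   { tuple_strukt: [{ a: 1, b: true, c: "hello" }, { tag: "First", values: undefined }] },
+ *   { fee: 200 } // override the default fee of 100
+ * );
+ * ```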
+ */ + fee?: number; + }) => Promise>; +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/types/method-options.d.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/types/method-options.d.ts new file mode 100644 index 00000000..fc6b21d5 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/dist/types/method-options.d.ts @@ -0,0 +1,47 @@ +declare let responseTypes: 'simulated' | 'full' | undefined; +export type ResponseTypes = typeof responseTypes; +export type XDR_BASE64 = string; +export interface Wallet { + isConnected: () => Promise; + isAllowed: () => Promise; + getUserInfo: () => Promise<{ + publicKey?: string; + }>; + signTransaction: (tx: XDR_BASE64, opts?: { + network?: string; + networkPassphrase?: string; + accountToSign?: string; + }) => Promise; + signAuthEntry: (entryXdr: XDR_BASE64, opts?: { + accountToSign?: string; + }) => Promise; +} +export type ClassOptions = { + contractId: string; + networkPassphrase: string; + rpcUrl: string; + errorTypes?: Record; + /** + * A Wallet interface, such as Freighter, that has the methods `isConnected`, `isAllowed`, `getUserInfo`, and `signTransaction`. If not provided, will attempt to import and use Freighter. Example: + * + * @example + * ```ts + * import freighter from "@stellar/freighter-api"; + * import { Contract } from "test_custom_types"; + * const contract = new Contract({ + * …, + * wallet: freighter, + * }) + * ``` + */ + wallet?: Wallet; +}; +export type MethodOptions = { + /** + * The fee to pay for the transaction. Default: soroban-sdk's BASE_FEE ('100') + */ + fee?: number; +}; +export {}; diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/package-lock.json b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/package-lock.json new file mode 100644 index 00000000..44332a41 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/package-lock.json @@ -0,0 +1,328 @@ +{ + "name": "test_custom_types", + "version": "0.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "test_custom_types", + "version": "0.0.0", + "dependencies": { + "@stellar/freighter-api": "1.7.1", + "@stellar/stellar-sdk": "11.2.0", + "buffer": "6.0.3" + }, + "devDependencies": { + "typescript": "5.3.3" + } + }, + "node_modules/@stellar/freighter-api": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@stellar/freighter-api/-/freighter-api-1.7.1.tgz", + "integrity": "sha512-XvPO+XgEbkeP0VhP0U1edOkds+rGS28+y8GRGbCVXeZ9ZslbWqRFQoETAdX8IXGuykk2ib/aPokiLc5ZaWYP7w==" + }, + "node_modules/@stellar/js-xdr": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@stellar/js-xdr/-/js-xdr-3.0.1.tgz", + "integrity": "sha512-dp5Eh7Nr1YjiIeqpdkj2cQYxfoPudDAH3ck8MWggp48Htw66Z/hUssNYUQG/OftLjEmHT90Z/dtey2Y77DOxIw==" + }, + "node_modules/@stellar/stellar-base": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@stellar/stellar-base/-/stellar-base-10.0.1.tgz", + "integrity": "sha512-BDbx7VHOEQh+4J3Q+gStNXgPaNckVFmD4aOlBBGwxlF6vPFmVnW8IoJdkX7T58zpX55eWI6DXvEhDBlrqTlhAQ==", + "dependencies": { + "@stellar/js-xdr": "^3.0.1", + "base32.js": "^0.1.0", + "bignumber.js": "^9.1.2", + "buffer": "^6.0.3", + "sha.js": "^2.3.6", + "tweetnacl": "^1.0.3" + }, + "optionalDependencies": { + "sodium-native": "^4.0.1" + } + }, + "node_modules/@stellar/stellar-sdk": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/@stellar/stellar-sdk/-/stellar-sdk-11.2.0.tgz", + 
"integrity": "sha512-qInRR+mLLl9O/AI6Q+Sr19RZeYJtlNoJQJi3pch5BYoMvVhjO8IU8AhHADP//Zmc2osyogwPuqXBiFdaGlfHWA==", + "dependencies": { + "@stellar/stellar-base": "10.0.1", + "axios": "^1.6.5", + "bignumber.js": "^9.1.2", + "eventsource": "^2.0.2", + "randombytes": "^2.1.0", + "toml": "^3.0.0", + "urijs": "^1.19.1" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/axios": { + "version": "1.6.5", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.5.tgz", + "integrity": "sha512-Ii012v05KEVuUoFWmMW/UQv9aRIc3ZwkWDcM+h5Il8izZCtRVpDUfwpoFf7eOtajT3QiGR4yDUx7lPqHJULgbg==", + "dependencies": { + "follow-redirects": "^1.15.4", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/base32.js": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/base32.js/-/base32.js-0.1.0.tgz", + "integrity": "sha512-n3TkB02ixgBOhTvANakDb4xaMXnYUVkNoRFJjQflcqMQhyEKxEHdj3E6N8t8sUQ0mjH/3/JxzlXuz3ul/J90pQ==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bignumber.js": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz", + "integrity": "sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==", + "engines": { + "node": "*" + } + }, + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.4", + "resolved": 
"https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-gyp-build": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.0.tgz", + "integrity": "sha512-u6fs2AEUljNho3EYTJNBfImO5QTo/J/1Etd+NVdCj7qWKUSN/bSLkZwhDv7I+w/MSC6qJ4cknepkAYykDdK8og==", + "optional": true, + "bin": { + "node-gyp-build": "bin.js", + "node-gyp-build-optional": "optional.js", + "node-gyp-build-test": "build-test.js" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + 
"url": "https://feross.org/support" + } + ] + }, + "node_modules/sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "bin": { + "sha.js": "bin.js" + } + }, + "node_modules/sodium-native": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/sodium-native/-/sodium-native-4.0.5.tgz", + "integrity": "sha512-YGimGhy7Ho6pTAAvuNdn3Tv9C2MD7HP89X1omReHat0Fd1mMnapGqwzb5YoHTAbIEh8tQmKP6+uLlwYCkf+EOA==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "node-gyp-build": "^4.6.0" + } + }, + "node_modules/toml": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", + "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==" + }, + "node_modules/tweetnacl": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz", + "integrity": "sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==" + }, + "node_modules/typescript": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", + "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/urijs": { + "version": "1.19.11", + "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.11.tgz", + "integrity": "sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==" + } + } +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/package.json b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/package.json new file mode 100644 index 00000000..1f63968a --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/package.json @@ -0,0 +1,20 @@ +{ + "version": "0.0.0", + "name": "test_custom_types", + "dependencies": { + "@stellar/freighter-api": "1.7.1", + "buffer": "6.0.3", + "@stellar/stellar-sdk": "11.2.0" + }, + "scripts": { + "build": "node ./scripts/build.mjs" + }, + "exports": { + "require": "./dist/cjs/index.js", + "import": "./dist/esm/index.js" + }, + "typings": "dist/types/index.d.ts", + "devDependencies": { + "typescript": "5.3.3" + } +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/build.mjs b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/build.mjs new file mode 100644 index 00000000..15a17042 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/build.mjs @@ -0,0 +1,37 @@ +import { spawnSync } from "node:child_process" +import fs from "node:fs" +import path from "node:path" + +const buildDir = "./dist" + +const { error, stderr } = spawnSync("tsc", ["-b", "./scripts/tsconfig.cjs.json", "./scripts/tsconfig.esm.json", "./scripts/tsconfig.types.json"], { stdio: "inherit" }) + +if (error) { + console.error(stderr) + console.error(error) + throw error +} + +function createEsmModulePackageJson() { + fs.readdir(buildDir, function (err, dirs) { + if (err) { + throw err + } + dirs.forEach(function (dir) { + if (dir === "esm") { + // 1. 
add package.json file with "type": "module" + var packageJsonFile = path.join(buildDir, dir, "/package.json") + if (!fs.existsSync(packageJsonFile)) { + fs.writeFileSync( + packageJsonFile, + '{"type": "module"}', + 'utf8', + err => { if (err) throw err } + ) + } + } + }) + }) +} + +createEsmModulePackageJson() diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/tsconfig.cjs.json b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/tsconfig.cjs.json new file mode 100644 index 00000000..542ea86d --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "../dist/cjs", + "module": "commonjs" + } +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/tsconfig.esm.json b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/tsconfig.esm.json new file mode 100644 index 00000000..92b45277 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/tsconfig.esm.json @@ -0,0 +1,7 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "../dist/esm", + "module": "esnext" + } +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/tsconfig.types.json b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/tsconfig.types.json new file mode 100644 index 00000000..8a24fc13 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/scripts/tsconfig.types.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "../dist/types", + "declaration": true, + "emitDeclarationOnly": true + } +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/src/assembled-tx.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/src/assembled-tx.ts new file mode 100644 index 00000000..f4f892d4 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/src/assembled-tx.ts @@ -0,0 +1,664 @@ +import { + Account, + Address, + Contract, + Operation, + SorobanRpc, + StrKey, + TimeoutInfinite, + TransactionBuilder, + authorizeEntry, + hash, + nativeToScVal, + xdr, + BASE_FEE, +} from "@stellar/stellar-sdk"; +import type { Memo, MemoType, Transaction } from "@stellar/stellar-sdk"; +import { Buffer } from "buffer"; +import type { + ClassOptions, + MethodOptions, + Wallet, + XDR_BASE64, +} from "./method-options.js"; + +export type Tx = Transaction, Operation[]> + +export class ExpiredStateError extends Error { } +export class NeedsMoreSignaturesError extends Error { } +export class WalletDisconnectedError extends Error { } +export class SendResultOnlyError extends Error { } +export class SendFailedError extends Error { } +export class NoUnsignedNonInvokerAuthEntriesError extends Error { } + +type SendTx = SorobanRpc.Api.SendTransactionResponse; +type GetTx = SorobanRpc.Api.GetTransactionResponse; + +export type u32 = number; +export type i32 = number; +export type u64 = bigint; +export type i64 = bigint; +export type u128 = bigint; +export type i128 = bigint; +export type u256 = bigint; +export type i256 = bigint; +export type Option = T | undefined; +export type Typepoint = bigint; +export type Duration = bigint; +export {Address}; + +/// Error interface containing the error message +export interface Error_ { message: string }; + +export interface Result { + unwrap(): T, + unwrapErr(): E, + isOk(): 
boolean, + isErr(): boolean, +}; + +export class Ok implements Result { + constructor(readonly value: T) { } + unwrapErr(): E { + throw new Error('No error'); + } + unwrap(): T { + return this.value; + } + + isOk(): boolean { + return true; + } + + isErr(): boolean { + return !this.isOk() + } +} + +export class Err implements Result { + constructor(readonly error: E) { } + unwrapErr(): E { + return this.error; + } + unwrap(): never { + throw new Error(this.error.message); + } + + isOk(): boolean { + return false; + } + + isErr(): boolean { + return !this.isOk() + } +} + +export const contractErrorPattern = /Error\(Contract, #(\d+)\)/; + +type AssembledTransactionOptions = MethodOptions & + ClassOptions & { + method: string; + args?: any[]; + parseResultXdr: (xdr: string | xdr.ScVal | Err) => T; + }; + +export const NULL_ACCOUNT = "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF" + +export class AssembledTransaction { + public raw: Tx + private simulation?: SorobanRpc.Api.SimulateTransactionResponse + private simulationResult?: SorobanRpc.Api.SimulateHostFunctionResult + private simulationTransactionData?: xdr.SorobanTransactionData + private server: SorobanRpc.Server + + toJSON() { + return JSON.stringify({ + method: this.options.method, + tx: this.raw?.toXDR(), + simulationResult: { + auth: this.simulationData.result.auth.map(a => a.toXDR('base64')), + retval: this.simulationData.result.retval.toXDR('base64'), + }, + simulationTransactionData: this.simulationData.transactionData.toXDR('base64'), + }) + } + + static fromJSON( + options: Omit, 'args'>, + { tx, simulationResult, simulationTransactionData }: + { + tx: XDR_BASE64, + simulationResult: { + auth: XDR_BASE64[], + retval: XDR_BASE64, + }, + simulationTransactionData: XDR_BASE64, + } + ): AssembledTransaction { + const txn = new AssembledTransaction(options) + txn.raw = TransactionBuilder.fromXDR(tx, options.networkPassphrase) as Tx + txn.simulationResult = { + auth: simulationResult.auth.map(a => xdr.SorobanAuthorizationEntry.fromXDR(a, 'base64')), + retval: xdr.ScVal.fromXDR(simulationResult.retval, 'base64'), + } + txn.simulationTransactionData = xdr.SorobanTransactionData.fromXDR(simulationTransactionData, 'base64') + return txn + } + + private constructor(public options: AssembledTransactionOptions) { + this.server = new SorobanRpc.Server(this.options.rpcUrl, { + allowHttp: this.options.rpcUrl.startsWith("http://"), + }); + } + + static async fromSimulation(options: AssembledTransactionOptions): Promise> { + const tx = new AssembledTransaction(options) + const contract = new Contract(options.contractId); + + tx.raw = new TransactionBuilder(await tx.getAccount(), { + fee: options.fee?.toString(10) ?? BASE_FEE, + networkPassphrase: options.networkPassphrase, + }) + .addOperation(contract.call(options.method, ...(options.args ?? 
[]))) + .setTimeout(TimeoutInfinite) + .build(); + + return await tx.simulate() + } + + simulate = async (): Promise => { + if (!this.raw) throw new Error('Transaction has not yet been assembled') + this.simulation = await this.server.simulateTransaction(this.raw); + + if (SorobanRpc.Api.isSimulationSuccess(this.simulation)) { + this.raw = SorobanRpc.assembleTransaction( + this.raw, + this.simulation + ).build() + } + + return this + } + + get simulationData(): { + result: SorobanRpc.Api.SimulateHostFunctionResult + transactionData: xdr.SorobanTransactionData + } { + if (this.simulationResult && this.simulationTransactionData) { + return { + result: this.simulationResult, + transactionData: this.simulationTransactionData, + } + } + // else, we know we just did the simulation on this machine + const simulation = this.simulation! + if (SorobanRpc.Api.isSimulationError(simulation)) { + throw new Error(`Transaction simulation failed: "${simulation.error}"`) + } + + if (SorobanRpc.Api.isSimulationRestore(simulation)) { + throw new ExpiredStateError(`You need to restore some contract state before you can invoke this method. ${JSON.stringify(simulation, null, 2)}`) + } + + if (!simulation.result) { + throw new Error(`Expected an invocation simulation, but got no 'result' field. Simulation: ${JSON.stringify(simulation, null, 2)}`) + } + + // add to object for serialization & deserialization + this.simulationResult = simulation.result + this.simulationTransactionData = simulation.transactionData.build() + + return { + result: this.simulationResult, + transactionData: this.simulationTransactionData!, + } + } + + get result(): T { + try { + return this.options.parseResultXdr(this.simulationData.result.retval) + } catch (e) { + let err = this.parseError(e.toString()) + if (err) return err as T + throw e + } + } + + parseError(errorMessage: string): Err | undefined { + if (!this.options.errorTypes) return + const match = errorMessage.match(contractErrorPattern) + if (!match) return + let i = parseInt(match[1], 10) + let err = this.options.errorTypes[i] + if (err) return new Err(err) + } + + getWallet = async (): Promise => { + return this.options.wallet ?? (await import("@stellar/freighter-api")).default + } + + getPublicKey = async (): Promise => { + const wallet = await this.getWallet() + if (await wallet.isConnected() && await wallet.isAllowed()) { + return (await wallet.getUserInfo()).publicKey + } + } + + /** + * Get account details from the Soroban network for the publicKey currently + * selected in user's wallet. If not connected to Freighter, use placeholder + * null account. + */ + getAccount = async (): Promise => { + const publicKey = await this.getPublicKey() + return publicKey + ? await this.server.getAccount(publicKey) + : new Account(NULL_ACCOUNT, "0") + } + + /** + * Sign the transaction with the `wallet` (default Freighter), then send to + * the network and return a `SentTransaction` that keeps track of all the + * attempts to send and fetch the transaction from the network. + */ + signAndSend = async ({ secondsToWait = 10, force = false }: { + /** + * Wait `secondsToWait` seconds (default: 10) for both the transaction to SEND successfully (will keep trying if the server returns `TRY_AGAIN_LATER`), as well as for the transaction to COMPLETE (will keep checking if the server returns `PENDING`). + */ + secondsToWait?: number + /** + * If `true`, sign and send the transaction even if it is a read call. 
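+     *
+     * A sketch of the force path (assumes `tx` is an already-simulated,
+     * read-only AssembledTransaction):
+     *
+     * @example
+     * ```ts
+     * // read calls normally make signAndSend throw; `force: true` signs and submits anyway
+     * const sent = await tx.signAndSend({ force: true });
+     * ```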
+ */ + force?: boolean + } = {}): Promise> => { + if (!this.raw) { + throw new Error('Transaction has not yet been simulated') + } + + if (!force && this.isReadCall) { + throw new Error('This is a read call. It requires no signature or sending. Use `force: true` to sign and send anyway.') + } + + if (!await this.hasRealInvoker()) { + throw new WalletDisconnectedError('Wallet is not connected') + } + + if (this.raw.source !== (await this.getAccount()).accountId()) { + throw new Error(`You must submit the transaction with the account that originally created it. Please switch to the wallet with "${this.raw.source}" as its public key.`) + } + + if ((await this.needsNonInvokerSigningBy()).length) { + throw new NeedsMoreSignaturesError( + 'Transaction requires more signatures. See `needsNonInvokerSigningBy` for details.' + ) + } + + return await SentTransaction.init(this.options, this, secondsToWait); + } + + getStorageExpiration = async () => { + const entryRes = await this.server.getLedgerEntries( + new Contract(this.options.contractId).getFootprint() + ) + if ( + !entryRes.entries || + !entryRes.entries.length || + !entryRes.entries[0].liveUntilLedgerSeq + ) throw new Error('failed to get ledger entry') + return entryRes.entries[0].liveUntilLedgerSeq + } + + /** + * Get a list of accounts, other than the invoker of the simulation, that + * need to sign auth entries in this transaction. + * + * Soroban allows multiple people to sign a transaction. Someone needs to + * sign the final transaction envelope; this person/account is called the + * _invoker_, or _source_. Other accounts might need to sign individual auth + * entries in the transaction, if they're not also the invoker. + * + * This function returns a list of accounts that need to sign auth entries, + * assuming that the same invoker/source account will sign the final + * transaction envelope as signed the initial simulation. + * + * One at a time, for each public key in this array, you will need to + * serialize this transaction with `toJSON`, send to the owner of that key, + * deserialize the transaction with `txFromJson`, and call + * {@link signAuthEntries}. Then re-serialize and send to the next account + * in this list. + */ + needsNonInvokerSigningBy = async ({ + includeAlreadySigned = false, + }: { + /** + * Whether or not to include auth entries that have already been signed. Default: false + */ + includeAlreadySigned?: boolean + } = {}): Promise => { + if (!this.raw) { + throw new Error('Transaction has not yet been simulated') + } + + // We expect that any transaction constructed by these libraries has a + // single operation, which is an InvokeHostFunction operation. The host + // function being invoked is the contract method call. + if (!("operations" in this.raw)) { + throw new Error( + `Unexpected Transaction type; no operations: ${JSON.stringify(this.raw) + }` + ) + } + const rawInvokeHostFunctionOp = this.raw + .operations[0] as Operation.InvokeHostFunction + + return [...new Set((rawInvokeHostFunctionOp.auth ?? 
[]).filter(entry => + entry.credentials().switch() === + xdr.SorobanCredentialsType.sorobanCredentialsAddress() && + ( + includeAlreadySigned || + entry.credentials().address().signature().switch().name === 'scvVoid' + ) + ).map(entry => StrKey.encodeEd25519PublicKey( + entry.credentials().address().address().accountId().ed25519() + )))] + } + + preImageFor( + entry: xdr.SorobanAuthorizationEntry, + signatureExpirationLedger: number + ): xdr.HashIdPreimage { + const addrAuth = entry.credentials().address() + return xdr.HashIdPreimage.envelopeTypeSorobanAuthorization( + new xdr.HashIdPreimageSorobanAuthorization({ + networkId: hash(Buffer.from(this.options.networkPassphrase)), + nonce: addrAuth.nonce(), + invocation: entry.rootInvocation(), + signatureExpirationLedger, + }), + ) + } + + /** + * If {@link needsNonInvokerSigningBy} returns a non-empty list, you can serialize + * the transaction with `toJSON`, send it to the owner of one of the public keys + * in the map, deserialize with `txFromJSON`, and call this method on their + * machine. Internally, this will use `signAuthEntry` function from connected + * `wallet` for each. + * + * Then, re-serialize the transaction and either send to the next + * `needsNonInvokerSigningBy` owner, or send it back to the original account + * who simulated the transaction so they can {@link sign} the transaction + * envelope and {@link send} it to the network. + * + * Sending to all `needsNonInvokerSigningBy` owners in parallel is not currently + * supported! + */ + signAuthEntries = async ( + /** + * When to set each auth entry to expire. Could be any number of blocks in + * the future. Can be supplied as a promise or a raw number. Default: + * contract's current `persistent` storage expiration date/ledger + * number/block. + */ + expiration: number | Promise = this.getStorageExpiration() + ): Promise => { + if (!this.raw) throw new Error('Transaction has not yet been assembled or simulated') + const needsNonInvokerSigningBy = await this.needsNonInvokerSigningBy() + + if (!needsNonInvokerSigningBy) throw new NoUnsignedNonInvokerAuthEntriesError('No unsigned non-invoker auth entries; maybe you already signed?') + const publicKey = await this.getPublicKey() + if (!publicKey) throw new Error('Could not get public key from wallet; maybe Freighter is not signed in?') + if (needsNonInvokerSigningBy.indexOf(publicKey) === -1) throw new Error(`No auth entries for public key "${publicKey}"`) + const wallet = await this.getWallet() + + const rawInvokeHostFunctionOp = this.raw + .operations[0] as Operation.InvokeHostFunction + + const authEntries = rawInvokeHostFunctionOp.auth ?? [] + + for (const [i, entry] of authEntries.entries()) { + if ( + entry.credentials().switch() !== + xdr.SorobanCredentialsType.sorobanCredentialsAddress() + ) { + // if the invoker/source account, then the entry doesn't need explicit + // signature, since the tx envelope is already signed by the source + // account, so only check for sorobanCredentialsAddress + continue + } + const pk = StrKey.encodeEd25519PublicKey( + entry.credentials().address().address().accountId().ed25519() + ) + + // this auth entry needs to be signed by a different account + // (or maybe already was!) 
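+        // only entries whose address matches the key in the currently connected
+        // wallet get signed here; entries for other accounts are skipped so their
+        // owners can sign them on their own machines (see needsNonInvokerSigningBy)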
+ if (pk !== publicKey) continue + + authEntries[i] = await authorizeEntry( + entry, + async preimage => Buffer.from( + await wallet.signAuthEntry(preimage.toXDR('base64')), + 'base64' + ), + await expiration, + this.options.networkPassphrase + ) + } + } + + get isReadCall(): boolean { + const authsCount = this.simulationData.result.auth.length; + const writeLength = this.simulationData.transactionData.resources().footprint().readWrite().length + return (authsCount === 0) && (writeLength === 0); + } + + hasRealInvoker = async (): Promise => { + const account = await this.getAccount() + return account.accountId() !== NULL_ACCOUNT + } +} + +/** + * A transaction that has been sent to the Soroban network. This happens in two steps: + * + * 1. `sendTransaction`: initial submission of the transaction to the network. + * This step can run into problems, and will be retried with exponential + * backoff if it does. See all attempts in `sendTransactionResponseAll` and the + * most recent attempt in `sendTransactionResponse`. + * 2. `getTransaction`: once the transaction has been submitted to the network + * successfully, you need to wait for it to finalize to get the results of the + * transaction. This step can also run into problems, and will be retried with + * exponential backoff if it does. See all attempts in + * `getTransactionResponseAll` and the most recent attempt in + * `getTransactionResponse`. + */ +class SentTransaction { + public server: SorobanRpc.Server + public signed: Tx + public sendTransactionResponse?: SendTx + public sendTransactionResponseAll?: SendTx[] + public getTransactionResponse?: GetTx + public getTransactionResponseAll?: GetTx[] + + constructor(public options: AssembledTransactionOptions, public assembled: AssembledTransaction) { + this.server = new SorobanRpc.Server(this.options.rpcUrl, { + allowHttp: this.options.rpcUrl.startsWith("http://"), + }); + this.assembled = assembled + } + + static init = async ( + options: AssembledTransactionOptions, + assembled: AssembledTransaction, + secondsToWait: number = 10 + ): Promise> => { + const tx = new SentTransaction(options, assembled) + return await tx.send(secondsToWait) + } + + private send = async (secondsToWait: number = 10): Promise => { + const wallet = await this.assembled.getWallet() + + this.sendTransactionResponseAll = await withExponentialBackoff( + async (previousFailure) => { + if (previousFailure) { + // Increment transaction sequence number and resimulate before trying again + + // Soroban transaction can only have 1 operation + const op = this.assembled.raw.operations[0] as Operation.InvokeHostFunction; + + this.assembled.raw = new TransactionBuilder(await this.assembled.getAccount(), { + fee: this.assembled.raw.fee, + networkPassphrase: this.options.networkPassphrase, + }) + .setTimeout(TimeoutInfinite) + .addOperation( + Operation.invokeHostFunction({ ...op, auth: op.auth ?? 
[] }), + ) + .build() + + await this.assembled.simulate() + } + + const signature = await wallet.signTransaction(this.assembled.raw.toXDR(), { + networkPassphrase: this.options.networkPassphrase, + }); + + this.signed = TransactionBuilder.fromXDR( + signature, + this.options.networkPassphrase + ) as Tx + + return this.server.sendTransaction(this.signed) + }, + resp => resp.status !== "PENDING", + secondsToWait + ) + + this.sendTransactionResponse = this.sendTransactionResponseAll[this.sendTransactionResponseAll.length - 1] + + if (this.sendTransactionResponse.status !== "PENDING") { + throw new Error( + `Tried to resubmit transaction for ${secondsToWait + } seconds, but it's still failing. ` + + `All attempts: ${JSON.stringify( + this.sendTransactionResponseAll, + null, + 2 + )}` + ); + } + + const { hash } = this.sendTransactionResponse + + this.getTransactionResponseAll = await withExponentialBackoff( + () => this.server.getTransaction(hash), + resp => resp.status === SorobanRpc.Api.GetTransactionStatus.NOT_FOUND, + secondsToWait + ) + + this.getTransactionResponse = this.getTransactionResponseAll[this.getTransactionResponseAll.length - 1] + if (this.getTransactionResponse.status === SorobanRpc.Api.GetTransactionStatus.NOT_FOUND) { + console.error( + `Waited ${secondsToWait + } seconds for transaction to complete, but it did not. ` + + `Returning anyway. Check the transaction status manually. ` + + `Sent transaction: ${JSON.stringify( + this.sendTransactionResponse, + null, + 2 + )}\n` + + `All attempts to get the result: ${JSON.stringify( + this.getTransactionResponseAll, + null, + 2 + )}` + ); + } + + return this; + } + + get result(): T { + // 1. check if transaction was submitted and awaited with `getTransaction` + if ( + "getTransactionResponse" in this && + this.getTransactionResponse + ) { + // getTransactionResponse has a `returnValue` field unless it failed + if ("returnValue" in this.getTransactionResponse) { + return this.options.parseResultXdr(this.getTransactionResponse.returnValue!) + } + + // if "returnValue" not present, the transaction failed; return without parsing the result + throw new Error("Transaction failed! Cannot parse result.") + } + + // 2. otherwise, maybe it was merely sent with `sendTransaction` + if (this.sendTransactionResponse) { + const errorResult = this.sendTransactionResponse.errorResult?.result() + if (errorResult) { + throw new SendFailedError( + `Transaction simulation looked correct, but attempting to send the transaction failed. Check \`simulation\` and \`sendTransactionResponseAll\` to troubleshoot. Decoded \`sendTransactionResponse.errorResultXdr\`: ${errorResult}` + ) + } + throw new SendResultOnlyError( + `Transaction was sent to the network, but not yet awaited. No result to show. Await transaction completion with \`getTransaction(sendTransactionResponse.hash)\`` + ) + } + + // 3. finally, if neither of those are present, throw an error + throw new Error(`Sending transaction failed: ${JSON.stringify(this.assembled)}`) + } +} + +/** + * Keep calling a `fn` for `secondsToWait` seconds, if `keepWaitingIf` is true. + * Returns an array of all attempts to call the function. 
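+ *
+ * For example, mirroring how `SentTransaction.send` polls for a transaction
+ * result (`server` and `hash` are assumed to be in scope):
+ *
+ * @example
+ * ```ts
+ * // keep calling getTransaction for up to 30 seconds while it is still NOT_FOUND
+ * const attempts = await withExponentialBackoff(
+ *   () => server.getTransaction(hash),
+ *   resp => resp.status === SorobanRpc.Api.GetTransactionStatus.NOT_FOUND,
+ *   30
+ * );
+ * const finalResponse = attempts[attempts.length - 1];
+ * ```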
+ */
+async function withExponentialBackoff<T>(
+  fn: (previousFailure?: T) => Promise<T>,
+  keepWaitingIf: (result: T) => boolean,
+  secondsToWait: number,
+  exponentialFactor = 1.5,
+  verbose = false,
+): Promise<T[]> {
+  const attempts: T[] = []
+
+  let count = 0
+  attempts.push(await fn())
+  if (!keepWaitingIf(attempts[attempts.length - 1])) return attempts
+
+  const waitUntil = new Date(Date.now() + secondsToWait * 1000).valueOf()
+  let waitTime = 1000
+  let totalWaitTime = waitTime
+
+  while (Date.now() < waitUntil && keepWaitingIf(attempts[attempts.length - 1])) {
+    count++
+    // Wait a beat
+    if (verbose) {
+      console.info(`Waiting ${waitTime}ms before trying again (bringing the total wait time to ${totalWaitTime}ms so far, of total ${secondsToWait * 1000}ms)`)
+    }
+    await new Promise(res => setTimeout(res, waitTime))
+    // Exponential backoff
+    waitTime = waitTime * exponentialFactor;
+    if (new Date(Date.now() + waitTime).valueOf() > waitUntil) {
+      waitTime = waitUntil - Date.now()
+      if (verbose) {
+        console.info(`was gonna wait too long; new waitTime: ${waitTime}ms`)
+      }
+    }
+    totalWaitTime = waitTime + totalWaitTime
+    // Try again
+    attempts.push(await fn(attempts[attempts.length - 1]))
+    if (verbose && keepWaitingIf(attempts[attempts.length - 1])) {
+      console.info(
+        `${count}. Called ${fn}; ${attempts.length
+        } prev attempts. Most recent: ${JSON.stringify(attempts[attempts.length - 1], null, 2)
+        }`
+      )
+    }
+  }
+
+  return attempts
+}
diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/src/index.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/src/index.ts
new file mode 100644
index 00000000..e3119225
--- /dev/null
+++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/src/index.ts
@@ -0,0 +1,758 @@
+import { ContractSpec, Address } from '@stellar/stellar-sdk';
+import { Buffer } from "buffer";
+import { AssembledTransaction, Ok, Err } from './assembled-tx.js';
+import type {
+  u32,
+  i32,
+  u64,
+  i64,
+  u128,
+  i128,
+  u256,
+  i256,
+  Option,
+  Typepoint,
+  Duration,
+  Error_,
+  Result,
+} from './assembled-tx.js';
+import type { ClassOptions, XDR_BASE64 } from './method-options.js';
+
+export * from './assembled-tx.js';
+export * from './method-options.js';
+
+if (typeof window !== 'undefined') {
+  //@ts-ignore Buffer exists
+  window.Buffer = window.Buffer || Buffer;
+}
+
+
+export const networks = {
+    futurenet: {
+        networkPassphrase: "Test SDF Future Network ; October 2022",
+        contractId: "CBYMYMSDF6FBDNCFJCRC7KMO4REYFPOH2U4N7FXI3GJO6YXNCQ43CDSK",
+    }
+} as const
+
+/**
+    This is from the rust doc above the struct Test
+    */
+export interface Test {
+  /**
+    
+    */
+a: u32;
+  /**
+    
+    */
+b: boolean;
+  /**
+    
+    */
+c: string;
+}
+
+/**
+    
+    */
+export type SimpleEnum = {tag: "First", values: void} | {tag: "Second", values: void} | {tag: "Third", values: void};
+
+/**
+    
+    */
+export enum RoyalCard {
+  Jack = 11,
+  Queen = 12,
+  King = 13,
+}
+
+/**
+    
+    */
+export type TupleStruct = readonly [Test, SimpleEnum];
+/**
+    
+    */
+export type ComplexEnum = {tag: "Struct", values: readonly [Test]} | {tag: "Tuple", values: readonly [TupleStruct]} | {tag: "Enum", values: readonly [SimpleEnum]} | {tag: "Asset", values: readonly [string, i128]} | {tag: "Void", values: void};
+
+/**
+    
+    */
+export const Errors = {
+1: {message:"Please provide an odd number"}
+}
+
+export class Contract {
+    spec: ContractSpec;
+    constructor(public readonly options: ClassOptions) {
+        this.spec = new ContractSpec([
+            
"AAAAAQAAAC9UaGlzIGlzIGZyb20gdGhlIHJ1c3QgZG9jIGFib3ZlIHRoZSBzdHJ1Y3QgVGVzdAAAAAAAAAAABFRlc3QAAAADAAAAAAAAAAFhAAAAAAAABAAAAAAAAAABYgAAAAAAAAEAAAAAAAAAAWMAAAAAAAAR", + "AAAAAgAAAAAAAAAAAAAAClNpbXBsZUVudW0AAAAAAAMAAAAAAAAAAAAAAAVGaXJzdAAAAAAAAAAAAAAAAAAABlNlY29uZAAAAAAAAAAAAAAAAAAFVGhpcmQAAAA=", + "AAAAAwAAAAAAAAAAAAAACVJveWFsQ2FyZAAAAAAAAAMAAAAAAAAABEphY2sAAAALAAAAAAAAAAVRdWVlbgAAAAAAAAwAAAAAAAAABEtpbmcAAAAN", + "AAAAAQAAAAAAAAAAAAAAC1R1cGxlU3RydWN0AAAAAAIAAAAAAAAAATAAAAAAAAfQAAAABFRlc3QAAAAAAAAAATEAAAAAAAfQAAAAClNpbXBsZUVudW0AAA==", + "AAAAAgAAAAAAAAAAAAAAC0NvbXBsZXhFbnVtAAAAAAUAAAABAAAAAAAAAAZTdHJ1Y3QAAAAAAAEAAAfQAAAABFRlc3QAAAABAAAAAAAAAAVUdXBsZQAAAAAAAAEAAAfQAAAAC1R1cGxlU3RydWN0AAAAAAEAAAAAAAAABEVudW0AAAABAAAH0AAAAApTaW1wbGVFbnVtAAAAAAABAAAAAAAAAAVBc3NldAAAAAAAAAIAAAATAAAACwAAAAAAAAAAAAAABFZvaWQ=", + "AAAABAAAAAAAAAAAAAAABUVycm9yAAAAAAAAAQAAABxQbGVhc2UgcHJvdmlkZSBhbiBvZGQgbnVtYmVyAAAAD051bWJlck11c3RCZU9kZAAAAAAB", + "AAAAAAAAAAAAAAAFaGVsbG8AAAAAAAABAAAAAAAAAAVoZWxsbwAAAAAAABEAAAABAAAAEQ==", + "AAAAAAAAAAAAAAAEd29pZAAAAAAAAAAA", + "AAAAAAAAAAAAAAADdmFsAAAAAAAAAAABAAAAAA==", + "AAAAAAAAAAAAAAAQdTMyX2ZhaWxfb25fZXZlbgAAAAEAAAAAAAAABHUzMl8AAAAEAAAAAQAAA+kAAAAEAAAAAw==", + "AAAAAAAAAAAAAAAEdTMyXwAAAAEAAAAAAAAABHUzMl8AAAAEAAAAAQAAAAQ=", + "AAAAAAAAAAAAAAAEaTMyXwAAAAEAAAAAAAAABGkzMl8AAAAFAAAAAQAAAAU=", + "AAAAAAAAAAAAAAAEaTY0XwAAAAEAAAAAAAAABGk2NF8AAAAHAAAAAQAAAAc=", + "AAAAAAAAACxFeGFtcGxlIGNvbnRyYWN0IG1ldGhvZCB3aGljaCB0YWtlcyBhIHN0cnVjdAAAAApzdHJ1a3RfaGVsAAAAAAABAAAAAAAAAAZzdHJ1a3QAAAAAB9AAAAAEVGVzdAAAAAEAAAPqAAAAEQ==", + "AAAAAAAAAAAAAAAGc3RydWt0AAAAAAABAAAAAAAAAAZzdHJ1a3QAAAAAB9AAAAAEVGVzdAAAAAEAAAfQAAAABFRlc3Q=", + "AAAAAAAAAAAAAAAGc2ltcGxlAAAAAAABAAAAAAAAAAZzaW1wbGUAAAAAB9AAAAAKU2ltcGxlRW51bQAAAAAAAQAAB9AAAAAKU2ltcGxlRW51bQAA", + "AAAAAAAAAAAAAAAHY29tcGxleAAAAAABAAAAAAAAAAdjb21wbGV4AAAAB9AAAAALQ29tcGxleEVudW0AAAAAAQAAB9AAAAALQ29tcGxleEVudW0A", + "AAAAAAAAAAAAAAAIYWRkcmVzc2UAAAABAAAAAAAAAAhhZGRyZXNzZQAAABMAAAABAAAAEw==", + "AAAAAAAAAAAAAAAFYnl0ZXMAAAAAAAABAAAAAAAAAAVieXRlcwAAAAAAAA4AAAABAAAADg==", + "AAAAAAAAAAAAAAAHYnl0ZXNfbgAAAAABAAAAAAAAAAdieXRlc19uAAAAA+4AAAAJAAAAAQAAA+4AAAAJ", + "AAAAAAAAAAAAAAAEY2FyZAAAAAEAAAAAAAAABGNhcmQAAAfQAAAACVJveWFsQ2FyZAAAAAAAAAEAAAfQAAAACVJveWFsQ2FyZAAAAA==", + "AAAAAAAAAAAAAAAHYm9vbGVhbgAAAAABAAAAAAAAAAdib29sZWFuAAAAAAEAAAABAAAAAQ==", + "AAAAAAAAABdOZWdhdGVzIGEgYm9vbGVhbiB2YWx1ZQAAAAADbm90AAAAAAEAAAAAAAAAB2Jvb2xlYW4AAAAAAQAAAAEAAAAB", + "AAAAAAAAAAAAAAAEaTEyOAAAAAEAAAAAAAAABGkxMjgAAAALAAAAAQAAAAs=", + "AAAAAAAAAAAAAAAEdTEyOAAAAAEAAAAAAAAABHUxMjgAAAAKAAAAAQAAAAo=", + "AAAAAAAAAAAAAAAKbXVsdGlfYXJncwAAAAAAAgAAAAAAAAABYQAAAAAAAAQAAAAAAAAAAWIAAAAAAAABAAAAAQAAAAQ=", + "AAAAAAAAAAAAAAADbWFwAAAAAAEAAAAAAAAAA21hcAAAAAPsAAAABAAAAAEAAAABAAAD7AAAAAQAAAAB", + "AAAAAAAAAAAAAAADdmVjAAAAAAEAAAAAAAAAA3ZlYwAAAAPqAAAABAAAAAEAAAPqAAAABA==", + "AAAAAAAAAAAAAAAFdHVwbGUAAAAAAAABAAAAAAAAAAV0dXBsZQAAAAAAA+0AAAACAAAAEQAAAAQAAAABAAAD7QAAAAIAAAARAAAABA==", + "AAAAAAAAAB9FeGFtcGxlIG9mIGFuIG9wdGlvbmFsIGFyZ3VtZW50AAAAAAZvcHRpb24AAAAAAAEAAAAAAAAABm9wdGlvbgAAAAAD6AAAAAQAAAABAAAD6AAAAAQ=", + "AAAAAAAAAAAAAAAEdTI1NgAAAAEAAAAAAAAABHUyNTYAAAAMAAAAAQAAAAw=", + "AAAAAAAAAAAAAAAEaTI1NgAAAAEAAAAAAAAABGkyNTYAAAANAAAAAQAAAA0=", + "AAAAAAAAAAAAAAAGc3RyaW5nAAAAAAABAAAAAAAAAAZzdHJpbmcAAAAAABAAAAABAAAAEA==", + "AAAAAAAAAAAAAAAMdHVwbGVfc3RydWt0AAAAAQAAAAAAAAAMdHVwbGVfc3RydWt0AAAH0AAAAAtUdXBsZVN0cnVjdAAAAAABAAAH0AAAAAtUdXBsZVN0cnVjdAA=" + ]); + } + private readonly parsers = { + hello: (result: XDR_BASE64): string => this.spec.funcResToNative("hello", result), + woid: () => {}, + val: (result: XDR_BASE64): 
any => this.spec.funcResToNative("val", result),
+        u32FailOnEven: (result: XDR_BASE64 | Err): Ok<u32> | Err => {
+            if (result instanceof Err) return result
+            return new Ok(this.spec.funcResToNative("u32_fail_on_even", result))
+        },
+        u32: (result: XDR_BASE64): u32 => this.spec.funcResToNative("u32_", result),
+        i32: (result: XDR_BASE64): i32 => this.spec.funcResToNative("i32_", result),
+        i64: (result: XDR_BASE64): i64 => this.spec.funcResToNative("i64_", result),
+        struktHel: (result: XDR_BASE64): Array<string> => this.spec.funcResToNative("strukt_hel", result),
+        strukt: (result: XDR_BASE64): Test => this.spec.funcResToNative("strukt", result),
+        simple: (result: XDR_BASE64): SimpleEnum => this.spec.funcResToNative("simple", result),
+        complex: (result: XDR_BASE64): ComplexEnum => this.spec.funcResToNative("complex", result),
+        addresse: (result: XDR_BASE64): string => this.spec.funcResToNative("addresse", result),
+        bytes: (result: XDR_BASE64): Buffer => this.spec.funcResToNative("bytes", result),
+        bytesN: (result: XDR_BASE64): Buffer => this.spec.funcResToNative("bytes_n", result),
+        card: (result: XDR_BASE64): RoyalCard => this.spec.funcResToNative("card", result),
+        boolean: (result: XDR_BASE64): boolean => this.spec.funcResToNative("boolean", result),
+        not: (result: XDR_BASE64): boolean => this.spec.funcResToNative("not", result),
+        i128: (result: XDR_BASE64): i128 => this.spec.funcResToNative("i128", result),
+        u128: (result: XDR_BASE64): u128 => this.spec.funcResToNative("u128", result),
+        multiArgs: (result: XDR_BASE64): u32 => this.spec.funcResToNative("multi_args", result),
+        map: (result: XDR_BASE64): Map<u32, boolean> => this.spec.funcResToNative("map", result),
+        vec: (result: XDR_BASE64): Array<u32> => this.spec.funcResToNative("vec", result),
+        tuple: (result: XDR_BASE64): readonly [string, u32] => this.spec.funcResToNative("tuple", result),
+        option: (result: XDR_BASE64): Option<u32> => this.spec.funcResToNative("option", result),
+        u256: (result: XDR_BASE64): u256 => this.spec.funcResToNative("u256", result),
+        i256: (result: XDR_BASE64): i256 => this.spec.funcResToNative("i256", result),
+        string: (result: XDR_BASE64): string => this.spec.funcResToNative("string", result),
+        tupleStrukt: (result: XDR_BASE64): TupleStruct => this.spec.funcResToNative("tuple_strukt", result)
+    };
+    private txFromJSON = <T>(json: string): AssembledTransaction<T> => {
+        const { method, ...tx } = JSON.parse(json)
+        return AssembledTransaction.fromJSON(
+            {
+                ...this.options,
+                method,
+                parseResultXdr: this.parsers[method],
+            },
+            tx,
+        );
+    }
+    public readonly fromJSON = {
+        hello: this.txFromJSON<ReturnType<typeof this.parsers['hello']>>,
+        woid: this.txFromJSON<ReturnType<typeof this.parsers['woid']>>,
+        val: this.txFromJSON<ReturnType<typeof this.parsers['val']>>,
+        u32FailOnEven: this.txFromJSON<ReturnType<typeof this.parsers['u32FailOnEven']>>,
+        u32: this.txFromJSON<ReturnType<typeof this.parsers['u32']>>,
+        i32: this.txFromJSON<ReturnType<typeof this.parsers['i32']>>,
+        i64: this.txFromJSON<ReturnType<typeof this.parsers['i64']>>,
+        struktHel: this.txFromJSON<ReturnType<typeof this.parsers['struktHel']>>,
+        strukt: this.txFromJSON<ReturnType<typeof this.parsers['strukt']>>,
+        simple: this.txFromJSON<ReturnType<typeof this.parsers['simple']>>,
+        complex: this.txFromJSON<ReturnType<typeof this.parsers['complex']>>,
+        addresse: this.txFromJSON<ReturnType<typeof this.parsers['addresse']>>,
+        bytes: this.txFromJSON<ReturnType<typeof this.parsers['bytes']>>,
+        bytesN: this.txFromJSON<ReturnType<typeof this.parsers['bytesN']>>,
+        card: this.txFromJSON<ReturnType<typeof this.parsers['card']>>,
+        boolean: this.txFromJSON<ReturnType<typeof this.parsers['boolean']>>,
+        not: this.txFromJSON<ReturnType<typeof this.parsers['not']>>,
+        i128: this.txFromJSON<ReturnType<typeof this.parsers['i128']>>,
+        u128: this.txFromJSON<ReturnType<typeof this.parsers['u128']>>,
+        multiArgs: this.txFromJSON<ReturnType<typeof this.parsers['multiArgs']>>,
+        map: this.txFromJSON<ReturnType<typeof this.parsers['map']>>,
+        vec: this.txFromJSON<ReturnType<typeof this.parsers['vec']>>,
+        tuple: this.txFromJSON<ReturnType<typeof this.parsers['tuple']>>,
+        option: this.txFromJSON<ReturnType<typeof this.parsers['option']>>,
+        u256: this.txFromJSON<ReturnType<typeof this.parsers['u256']>>,
+        i256: this.txFromJSON<ReturnType<typeof this.parsers['i256']>>,
+        string: this.txFromJSON<ReturnType<typeof this.parsers['string']>>,
+        tupleStrukt: this.txFromJSON<ReturnType<typeof this.parsers['tupleStrukt']>>
+    }
+    /**
+ * Construct and simulate a hello transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation.
If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + hello = async ({hello}: {hello: string}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'hello', + args: this.spec.funcArgsToScVals("hello", {hello}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['hello'], + }); + } + + + /** + * Construct and simulate a woid transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + woid = async (options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'woid', + args: this.spec.funcArgsToScVals("woid", {}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['woid'], + }); + } + + + /** + * Construct and simulate a val transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + val = async (options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'val', + args: this.spec.funcArgsToScVals("val", {}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['val'], + }); + } + + + /** + * Construct and simulate a u32_fail_on_even transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + u32FailOnEven = async ({u32_}: {u32_: u32}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'u32_fail_on_even', + args: this.spec.funcArgsToScVals("u32_fail_on_even", {u32_}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['u32FailOnEven'], + }); + } + + + /** + * Construct and simulate a u32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + u32 = async ({u32_}: {u32_: u32}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'u32_', + args: this.spec.funcArgsToScVals("u32_", {u32_}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['u32'], + }); + } + + + /** + * Construct and simulate a i32_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + i32 = async ({i32_}: {i32_: i32}, options: { + /** + * The fee to pay for the transaction. Default: 100. 
+ */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'i32_', + args: this.spec.funcArgsToScVals("i32_", {i32_}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['i32'], + }); + } + + + /** + * Construct and simulate a i64_ transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + i64 = async ({i64_}: {i64_: i64}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'i64_', + args: this.spec.funcArgsToScVals("i64_", {i64_}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['i64'], + }); + } + + + /** + * Construct and simulate a strukt_hel transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example contract method which takes a struct + */ + struktHel = async ({strukt}: {strukt: Test}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'strukt_hel', + args: this.spec.funcArgsToScVals("strukt_hel", {strukt}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['struktHel'], + }); + } + + + /** + * Construct and simulate a strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + strukt = async ({strukt}: {strukt: Test}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'strukt', + args: this.spec.funcArgsToScVals("strukt", {strukt}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['strukt'], + }); + } + + + /** + * Construct and simulate a simple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + simple = async ({simple}: {simple: SimpleEnum}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'simple', + args: this.spec.funcArgsToScVals("simple", {simple}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['simple'], + }); + } + + + /** + * Construct and simulate a complex transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + complex = async ({complex}: {complex: ComplexEnum}, options: { + /** + * The fee to pay for the transaction. Default: 100. 
+ */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'complex', + args: this.spec.funcArgsToScVals("complex", {complex}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['complex'], + }); + } + + + /** + * Construct and simulate a addresse transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + addresse = async ({addresse}: {addresse: string}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'addresse', + args: this.spec.funcArgsToScVals("addresse", {addresse: new Address(addresse)}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['addresse'], + }); + } + + + /** + * Construct and simulate a bytes transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + bytes = async ({bytes}: {bytes: Buffer}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'bytes', + args: this.spec.funcArgsToScVals("bytes", {bytes}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['bytes'], + }); + } + + + /** + * Construct and simulate a bytes_n transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + bytesN = async ({bytes_n}: {bytes_n: Buffer}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'bytes_n', + args: this.spec.funcArgsToScVals("bytes_n", {bytes_n}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['bytesN'], + }); + } + + + /** + * Construct and simulate a card transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + card = async ({card}: {card: RoyalCard}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'card', + args: this.spec.funcArgsToScVals("card", {card}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['card'], + }); + } + + + /** + * Construct and simulate a boolean transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + boolean = async ({boolean}: {boolean: boolean}, options: { + /** + * The fee to pay for the transaction. Default: 100. 
+ */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'boolean', + args: this.spec.funcArgsToScVals("boolean", {boolean}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['boolean'], + }); + } + + + /** + * Construct and simulate a not transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Negates a boolean value + */ + not = async ({boolean}: {boolean: boolean}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'not', + args: this.spec.funcArgsToScVals("not", {boolean}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['not'], + }); + } + + + /** + * Construct and simulate a i128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + i128 = async ({i128}: {i128: i128}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'i128', + args: this.spec.funcArgsToScVals("i128", {i128}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['i128'], + }); + } + + + /** + * Construct and simulate a u128 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + u128 = async ({u128}: {u128: u128}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'u128', + args: this.spec.funcArgsToScVals("u128", {u128}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['u128'], + }); + } + + + /** + * Construct and simulate a multi_args transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + multiArgs = async ({a, b}: {a: u32, b: boolean}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'multi_args', + args: this.spec.funcArgsToScVals("multi_args", {a, b}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['multiArgs'], + }); + } + + + /** + * Construct and simulate a map transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + map = async ({map}: {map: Map}, options: { + /** + * The fee to pay for the transaction. Default: 100. 
+ */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'map', + args: this.spec.funcArgsToScVals("map", {map}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['map'], + }); + } + + + /** + * Construct and simulate a vec transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + vec = async ({vec}: {vec: Array}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'vec', + args: this.spec.funcArgsToScVals("vec", {vec}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['vec'], + }); + } + + + /** + * Construct and simulate a tuple transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + tuple = async ({tuple}: {tuple: readonly [string, u32]}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'tuple', + args: this.spec.funcArgsToScVals("tuple", {tuple}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['tuple'], + }); + } + + + /** + * Construct and simulate a option transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.Example of an optional argument + */ + option = async ({option}: {option: Option}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'option', + args: this.spec.funcArgsToScVals("option", {option}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['option'], + }); + } + + + /** + * Construct and simulate a u256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + u256 = async ({u256}: {u256: u256}, options: { + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + } = {}) => { + return await AssembledTransaction.fromSimulation({ + method: 'u256', + args: this.spec.funcArgsToScVals("u256", {u256}), + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['u256'], + }); + } + + + /** + * Construct and simulate a i256 transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object. + */ + i256 = async ({i256}: {i256: i256}, options: { + /** + * The fee to pay for the transaction. Default: 100. 
+         */
+        fee?: number,
+    } = {}) => {
+        return await AssembledTransaction.fromSimulation({
+            method: 'i256',
+            args: this.spec.funcArgsToScVals("i256", {i256}),
+            ...options,
+            ...this.options,
+            errorTypes: Errors,
+            parseResultXdr: this.parsers['i256'],
+        });
+    }
+
+
+    /**
+ * Construct and simulate a string transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.
+ */
+    string = async ({string}: {string: string}, options: {
+        /**
+         * The fee to pay for the transaction. Default: 100.
+         */
+        fee?: number,
+    } = {}) => {
+        return await AssembledTransaction.fromSimulation({
+            method: 'string',
+            args: this.spec.funcArgsToScVals("string", {string}),
+            ...options,
+            ...this.options,
+            errorTypes: Errors,
+            parseResultXdr: this.parsers['string'],
+        });
+    }
+
+
+    /**
+ * Construct and simulate a tuple_strukt transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object.
+ */
+    tupleStrukt = async ({tuple_strukt}: {tuple_strukt: TupleStruct}, options: {
+        /**
+         * The fee to pay for the transaction. Default: 100.
+         */
+        fee?: number,
+    } = {}) => {
+        return await AssembledTransaction.fromSimulation({
+            method: 'tuple_strukt',
+            args: this.spec.funcArgsToScVals("tuple_strukt", {tuple_strukt}),
+            ...options,
+            ...this.options,
+            errorTypes: Errors,
+            parseResultXdr: this.parsers['tupleStrukt'],
+        });
+    }
+
+}
\ No newline at end of file
diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/src/method-options.ts b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/src/method-options.ts
new file mode 100644
index 00000000..737ae0a0
--- /dev/null
+++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/src/method-options.ts
@@ -0,0 +1,50 @@
+// defined this way so typeahead shows full union, not named alias
+let responseTypes: 'simulated' | 'full' | undefined
+export type ResponseTypes = typeof responseTypes
+
+export type XDR_BASE64 = string
+
+export interface Wallet {
+  isConnected: () => Promise<boolean>,
+  isAllowed: () => Promise<boolean>,
+  getUserInfo: () => Promise<{ publicKey?: string }>,
+  signTransaction: (tx: XDR_BASE64, opts?: {
+    network?: string,
+    networkPassphrase?: string,
+    accountToSign?: string,
+  }) => Promise<XDR_BASE64>,
+  signAuthEntry: (
+    entryXdr: XDR_BASE64,
+    opts?: {
+      accountToSign?: string;
+    }
+  ) => Promise<XDR_BASE64>
+}
+
+export type ClassOptions = {
+  contractId: string
+  networkPassphrase: string
+  rpcUrl: string
+  errorTypes?: Record<number, { message: string }>
+  /**
+   * A Wallet interface, such as Freighter, that has the methods `isConnected`, `isAllowed`, `getUserInfo`, and `signTransaction`. If not provided, will attempt to import and use Freighter. Example:
+   *
+   * @example
+   * ```ts
+   * import freighter from "@stellar/freighter-api";
+   * import { Contract } from "test_custom_types";
+   * const contract = new Contract({
+   *   …,
+   *   wallet: freighter,
+   * })
+   * ```
+   */
+  wallet?: Wallet
+}
+
+export type MethodOptions = {
+  /**
+   * The fee to pay for the transaction.
Default: soroban-sdk's BASE_FEE ('100') + */ + fee?: number +} diff --git a/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/tsconfig.json b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/tsconfig.json new file mode 100644 index 00000000..efd4c619 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/fixtures/test_custom_types/tsconfig.json @@ -0,0 +1,98 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + /* Language and Environment */ + "target": "ESNext", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + /* Modules */ + "module": "ESNext", /* Specify what module code is generated. */ + // "rootDir": "./", /* Specify the root folder within your source files. */ + "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. 
*/ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + /* Emit */ + "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + // "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. 
*/ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + // "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + /* Type Checking */ + // "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
 */
+  },
+  "include": [
+    "src/*"
+  ]
+}
diff --git a/cmd/crates/soroban-spec-typescript/src/boilerplate.rs b/cmd/crates/soroban-spec-typescript/src/boilerplate.rs
new file mode 100644
index 00000000..7ffe6418
--- /dev/null
+++ b/cmd/crates/soroban-spec-typescript/src/boilerplate.rs
@@ -0,0 +1,218 @@
+#![allow(non_snake_case)]
+use heck::{ToLowerCamelCase, ToShoutySnakeCase};
+use include_dir::{include_dir, Dir};
+use std::{
+    fs,
+    io::Write,
+    path::{Path, PathBuf},
+};
+use stellar_xdr::curr::ScSpecEntry;
+
+use super::generate;
+
+static PROJECT_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/src/project_template");
+
+const NETWORK_PASSPHRASE_TESTNET: &str = "Test SDF Network ; September 2015";
+const NETWORK_PASSPHRASE_FUTURENET: &str = "Test SDF Future Network ; October 2022";
+const NETWORK_PASSPHRASE_STANDALONE: &str = "Standalone Network ; February 2017";
+
+pub struct Project(PathBuf);
+
+impl TryInto<Project> for PathBuf {
+    type Error = std::io::Error;
+
+    fn try_into(self) -> Result<Project, Self::Error> {
+        PROJECT_DIR.extract(&self)?;
+        Ok(Project(self))
+    }
+}
+
+impl AsRef<Path> for Project {
+    fn as_ref(&self) -> &Path {
+        self.0.as_ref()
+    }
+}
+
+impl Project {
+    /// Initialize a new JS client project, updating placeholder strings in the template and
+    /// appending functions for each method in the contract to the index.ts file.
+    ///
+    /// # Arguments
+    ///
+    /// * `contract_name` - The colloquial name of this contract that will be used in the README and package.json
+    /// * `contract_id` - The ID/address of the contract on the network. Will be overridable with environment variables.
+    /// * `rpc_url` - The RPC URL of the network where this contract is deployed. Will be overridable with environment variables.
+    /// * `network_passphrase` - The passphrase of the network where this contract is deployed. Will be overridable with environment variables.
+    /// * `spec` - The contract specification.
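+    ///
+    /// # Example
+    ///
+    /// A sketch of typical usage, mirroring the test helper at the bottom of this
+    /// file (`wasm_bytes`, `output_dir`, and the contract ID are placeholders):
+    ///
+    /// ```ignore
+    /// let spec = soroban_spec::read::from_wasm(&wasm_bytes).unwrap();
+    /// let project: Project = output_dir.try_into()?; // extracts the embedded template
+    /// project.init(
+    ///     "my_contract",
+    ///     "C...PLACEHOLDER_CONTRACT_ID...",
+    ///     "https://rpc-futurenet.stellar.org:443",
+    ///     "Test SDF Future Network ; October 2022",
+    ///     &spec,
+    /// )?;
+    /// ```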
+ pub fn init( + &self, + contract_name: &str, + contract_id: &str, + rpc_url: &str, + network_passphrase: &str, + spec: &[ScSpecEntry], + ) -> std::io::Result<()> { + self.replace_placeholder_patterns(contract_name, contract_id, rpc_url, network_passphrase)?; + self.append_index_ts(spec, contract_id, network_passphrase) + } + + fn replace_placeholder_patterns( + &self, + contract_name: &str, + contract_id: &str, + rpc_url: &str, + network_passphrase: &str, + ) -> std::io::Result<()> { + let replacement_strings = &[ + ("INSERT_CONTRACT_NAME_HERE", contract_name), + ( + "INSERT_SCREAMING_SNAKE_CASE_CONTRACT_NAME_HERE", + &contract_name.to_shouty_snake_case(), + ), + ( + "INSERT_CAMEL_CASE_CONTRACT_NAME_HERE", + &contract_name.to_lower_camel_case(), + ), + ("INSERT_CONTRACT_ID_HERE", contract_id), + ("INSERT_NETWORK_PASSPHRASE_HERE", network_passphrase), + ("INSERT_RPC_URL_HERE", rpc_url), + ]; + let root: &Path = self.as_ref(); + [ + "package.json", + "README.md", + "src/assembled-tx.ts", + "src/index.ts", + "src/method-options.ts", + ] + .into_iter() + .try_for_each(|file_name| { + let file = &root.join(file_name); + let mut contents = fs::read_to_string(file).unwrap(); + for (pattern, replacement) in replacement_strings { + contents = contents.replace(pattern, replacement); + } + fs::write(file, contents) + }) + } + + fn append_index_ts( + &self, + spec: &[ScSpecEntry], + contract_id: &str, + network_passphrase: &str, + ) -> std::io::Result<()> { + let networks = Project::format_networks_object(contract_id, network_passphrase); + let types_and_fns = generate(spec); + fs::OpenOptions::new() + .append(true) + .open(self.0.join("src/index.ts"))? + .write_all(format!("\n\n{networks}\n\n{types_and_fns}").as_bytes()) + } + + fn format_networks_object(contract_id: &str, network_passphrase: &str) -> String { + let network = match network_passphrase { + NETWORK_PASSPHRASE_TESTNET => "testnet", + NETWORK_PASSPHRASE_FUTURENET => "futurenet", + NETWORK_PASSPHRASE_STANDALONE => "standalone", + _ => "unknown", + }; + format!( + r#"export const networks = {{ + {network}: {{ + networkPassphrase: "{network_passphrase}", + contractId: "{contract_id}", + }} +}} as const"# + ) + } +} + +#[cfg(test)] +mod test { + use temp_dir::TempDir; + use walkdir::WalkDir; + + use super::*; + + const EXAMPLE_WASM: &[u8] = include_bytes!( + "../../../../target/wasm32-unknown-unknown/test-wasms/test_custom_types.wasm" + ); + + fn init(root: impl AsRef) -> std::io::Result { + let spec = soroban_spec::read::from_wasm(EXAMPLE_WASM).unwrap(); + let p: Project = root.as_ref().to_path_buf().try_into()?; + p.init( + "test_custom_types", + "CA3D5KRYM6CB7OWQ6TWYRR3Z4T7GNZLKERYNZGGA5SOAOPIFY6YQGAXE", + "https://rpc-futurenet.stellar.org:443", + "Test SDF Future Network ; October 2022", + &spec, + ) + .unwrap(); + Ok(p) + } + + // TODO : fix the test below : + // the test below should verify only a certain subset of the files were copied + // rather then the entire directory. 
+    #[ignore]
+    #[test]
+    fn test_project_dir_location() {
+        // TODO: Ensure windows support
+        if cfg!(windows) {
+            return;
+        }
+        let temp_dir = TempDir::new().unwrap();
+        let _: Project = init(temp_dir.path()).unwrap();
+        let fixture = PathBuf::from("./fixtures/test_custom_types");
+        assert_dirs_equal(temp_dir.path(), &fixture);
+    }
+
+    #[ignore]
+    #[test]
+    fn build_package() {
+        let root = PathBuf::from("./fixtures/ts");
+        std::fs::remove_dir_all(&root).unwrap_or_default();
+        std::fs::create_dir_all(&root).unwrap();
+        let _: Project = init(&root).unwrap();
+        println!("Updated Snapshot!");
+    }
+
+    fn assert_dirs_equal<P: AsRef<Path>>(dir1: P, dir2: P) {
+        let walker1 = WalkDir::new(&dir1);
+        let walker2 = WalkDir::new(&dir2);
+
+        let mut paths1: Vec<_> = walker1.into_iter().collect::<Result<Vec<_>, _>>().unwrap();
+        let mut paths2: Vec<_> = walker2.into_iter().collect::<Result<Vec<_>, _>>().unwrap();
+
+        paths1
+            .sort_unstable_by_key(|entry| entry.path().strip_prefix(&dir1).unwrap().to_path_buf());
+        paths2
+            .sort_unstable_by_key(|entry| entry.path().strip_prefix(&dir2).unwrap().to_path_buf());
+
+        assert_eq!(
+            paths1.len(),
+            paths2.len(),
+            "{paths1:?}.len() != {paths2:?}.len()"
+        );
+
+        for (entry1, entry2) in paths1.iter().zip(paths2.iter()) {
+            let path1 = entry1.path();
+            let path2 = entry2.path();
+
+            if path1.is_file() && path2.is_file() {
+                let content1 = fs::read_to_string(path1).unwrap();
+                let content2 = fs::read_to_string(path2).unwrap();
+                pretty_assertions::assert_eq!(content1, content2, "{:?} != {:?}", path1, path2);
+            } else if path1.is_dir() && path2.is_dir() {
+                continue;
+            } else {
+                panic!(
+                    "{:?} is not a file",
+                    if path1.is_file() { path2 } else { path1 }
+                );
+            }
+        }
+    }
+}
diff --git a/cmd/crates/soroban-spec-typescript/src/lib.rs b/cmd/crates/soroban-spec-typescript/src/lib.rs
new file mode 100644
index 00000000..790f43b0
--- /dev/null
+++ b/cmd/crates/soroban-spec-typescript/src/lib.rs
@@ -0,0 +1,377 @@
+#![allow(
+    clippy::missing_errors_doc,
+    clippy::must_use_candidate,
+    clippy::missing_panics_doc
+)]
+
+use std::{fs, io};
+
+use crate::types::Type;
+use heck::ToLowerCamelCase;
+use itertools::Itertools;
+use sha2::{Digest, Sha256};
+use stellar_xdr::curr::{Limits, ScSpecEntry, WriteXdr};
+
+use types::Entry;
+
+use soroban_spec::read::{from_wasm, FromWasmError};
+
+pub mod boilerplate;
+mod types;
+pub mod wrapper;
+
+#[derive(thiserror::Error, Debug)]
+pub enum GenerateFromFileError {
+    #[error("reading file: {0}")]
+    Io(io::Error),
+    #[error("sha256 does not match, expected: {expected}")]
+    VerifySha256 { expected: String },
+    #[error("parsing contract spec: {0}")]
+    Parse(stellar_xdr::curr::Error),
+    #[error("getting contract spec: {0}")]
+    GetSpec(FromWasmError),
+}
+
+pub fn generate_from_file(
+    file: &str,
+    verify_sha256: Option<&str>,
+) -> Result<String, GenerateFromFileError> {
+    // Read file.
+    let wasm = fs::read(file).map_err(GenerateFromFileError::Io)?;
+
+    // Produce hash for file.
+    let sha256 = Sha256::digest(&wasm);
+    let sha256 = format!("{sha256:x}");
+
+    if let Some(verify_sha256) = verify_sha256 {
+        if verify_sha256 != sha256 {
+            return Err(GenerateFromFileError::VerifySha256 { expected: sha256 });
+        }
+    }
+
+    // Generate code.
+ let json = generate_from_wasm(&wasm).map_err(GenerateFromFileError::GetSpec)?; + Ok(json) +} + +pub fn generate_from_wasm(wasm: &[u8]) -> Result { + let spec = from_wasm(wasm)?; + let json = generate(&spec); + Ok(json) +} + +fn generate_class(fns: &[Entry], spec: &[ScSpecEntry]) -> String { + let methods = fns.iter().map(entry_to_method).join("\n\n "); + let parsers = fns + .iter() + .filter_map(entry_to_parser) + .map(|(method, parser)| format!("{method}: {parser}")) + .join(",\n "); + let from_jsons = fns + .iter() + .filter_map(entry_to_parser) + .map(|(method, _)| { + format!("{method}: this.txFromJSON>") + }) + .join(",\n "); + let spec = spec + .iter() + .map(|s| format!("\"{}\"", s.to_xdr_base64(Limits::none()).unwrap())) + .join(",\n "); + format!( + r#"export class Contract {{ + spec: ContractSpec; + constructor(public readonly options: ClassOptions) {{ + this.spec = new ContractSpec([ + {spec} + ]); + }} + private readonly parsers = {{ + {parsers} + }}; + private txFromJSON = (json: string): AssembledTransaction => {{ + const {{ method, ...tx }} = JSON.parse(json) + return AssembledTransaction.fromJSON( + {{ + ...this.options, + method, + parseResultXdr: this.parsers[method], + }}, + tx, + ); + }} + public readonly fromJSON = {{ + {from_jsons} + }} + {methods} +}}"#, + ) +} + +pub fn generate(spec: &[ScSpecEntry]) -> String { + let mut collected: Vec<_> = spec.iter().map(Entry::from).collect(); + if !spec.iter().any(is_error_enum) { + collected.push(Entry::ErrorEnum { + doc: String::new(), + name: "Error".to_string(), + cases: vec![], + }); + } + let (fns, other): (Vec<_>, Vec<_>) = collected + .into_iter() + .partition(|entry| matches!(entry, Entry::Function { .. })); + let top = other.iter().map(entry_to_method).join("\n"); + let bottom = generate_class(&fns, spec); + format!("{top}\n\n{bottom}") +} + +fn doc_to_ts_doc(doc: &str, method: Option<&str>) -> String { + let header = if let Some(method) = method { + format!( + r#"/** + * Construct and simulate a {method} transaction. Returns an `AssembledTransaction` object which will have a `result` field containing the result of the simulation. If this transaction changes contract state, you will need to call `signAndSend()` on the returned object."# + ) + } else { + "/**\n ".to_string() + }; + let footer = "\n */\n"; + let body = if doc.is_empty() { + String::new() + } else { + doc.split('\n').join("\n * ") + }; + format!(r#"{header}{body}{footer}"#) +} + +fn is_error_enum(entry: &ScSpecEntry) -> bool { + matches!(entry, ScSpecEntry::UdtErrorEnumV0(_)) +} + +const METHOD_OPTIONS: &str = r"{ + /** + * The fee to pay for the transaction. Default: 100. + */ + fee?: number, + }"; + +fn jsify_name(name: &String) -> String { + name.to_lower_camel_case() +} + +pub fn entry_to_parser(entry: &Entry) -> Option<(String, String)> { + if let Entry::Function { name, outputs, .. 
} = entry { + let mut is_result = false; + let mut return_type: String; + if outputs.is_empty() { + return_type = "void".to_owned(); + } else if outputs.len() == 1 { + return_type = type_to_ts(&outputs[0]); + is_result = return_type.starts_with("Result<"); + } else { + return_type = format!("readonly [{}]", outputs.iter().map(type_to_ts).join(", ")); + }; + + if is_result { + return_type = return_type + .strip_prefix("Result<") + .unwrap() + .strip_suffix('>') + .unwrap() + .to_owned(); + return_type = format!("Ok<{return_type}> | Err"); + } + + let output = outputs + .first() + .map(|_| format!("this.spec.funcResToNative(\"{name}\", result)")) + .unwrap_or_default(); + let parse_result_xdr = if return_type == "void" { + r"() => {}".to_owned() + } else if is_result { + format!( + r"(result: XDR_BASE64 | Err): {return_type} => {{ + if (result instanceof Err) return result + return new Ok({output}) + }}" + ) + } else { + format!(r"(result: XDR_BASE64): {return_type} => {output}") + }; + let js_name = jsify_name(name); + Some((js_name, parse_result_xdr)) + } else { + None + } +} + +#[allow(clippy::too_many_lines)] +pub fn entry_to_method(entry: &Entry) -> String { + match entry { + Entry::Function { + doc, name, inputs, .. + } => { + let input_vals = inputs.iter().map(func_input_to_arg_name).join(", "); + let input = (!inputs.is_empty()) + .then(|| { + format!( + "{{{input_vals}}}: {{{}}}, ", + inputs.iter().map(func_input_to_ts).join(", ") + ) + }) + .unwrap_or_default(); + let ts_doc = doc_to_ts_doc(doc, Some(name)); + let (js_name, _) = entry_to_parser(entry).unwrap(); + let parsed_scvals = inputs.iter().map(parse_arg_to_scval).join(", "); + let args = + format!("args: this.spec.funcArgsToScVals(\"{name}\", {{{parsed_scvals}}}),"); + let body = format!( + r#"return await AssembledTransaction.fromSimulation({{ + method: '{name}', + {args} + ...options, + ...this.options, + errorTypes: Errors, + parseResultXdr: this.parsers['{js_name}'], + }});"# + ); + format!( + r#" {ts_doc} {js_name} = async ({input}options: {METHOD_OPTIONS} = {{}}) => {{ + {body} + }} +"# + ) + } + Entry::Struct { doc, name, fields } => { + let docs = doc_to_ts_doc(doc, None); + let fields = fields.iter().map(field_to_ts).join("\n "); + format!( + r#"{docs}export interface {name} {{ + {fields} +}} +"# + ) + } + + Entry::TupleStruct { doc, name, fields } => { + let docs = doc_to_ts_doc(doc, None); + let fields = fields.iter().map(type_to_ts).join(", "); + format!("{docs}export type {name} = readonly [{fields}];") + } + + Entry::Union { name, doc, cases } => { + let doc = doc_to_ts_doc(doc, None); + let cases = cases.iter().map(case_to_ts).join(" | "); + + format!( + r#"{doc}export type {name} = {cases}; +"# + ) + } + Entry::Enum { doc, name, cases } => { + let doc = doc_to_ts_doc(doc, None); + let cases = cases.iter().map(enum_case_to_ts).join("\n "); + let name = (name == "Error") + .then(|| format!("{name}s")) + .unwrap_or(name.to_string()); + format!( + r#"{doc}export enum {name} {{ + {cases} +}} +"#, + ) + } + Entry::ErrorEnum { doc, cases, .. } => { + let doc = doc_to_ts_doc(doc, None); + let cases = cases + .iter() + .map(|c| format!("{}: {{message:\"{}\"}}", c.value, c.doc)) + .join(",\n "); + format!( + r#"{doc}export const Errors = {{ +{cases} +}}"# + ) + } + } +} + +fn enum_case_to_ts(case: &types::EnumCase) -> String { + let types::EnumCase { name, value, .. } = case; + format!("{name} = {value},") +} + +fn case_to_ts(case: &types::UnionCase) -> String { + let types::UnionCase { name, values, .. 
} = case; + format!( + "{{tag: \"{name}\", values: {}}}", + type_to_ts(&Type::Tuple { + elements: values.clone(), + }) + ) +} + +fn field_to_ts(field: &types::StructField) -> String { + let types::StructField { doc, name, value } = field; + let doc = doc_to_ts_doc(doc, None); + let type_ = type_to_ts(value); + format!("{doc}{name}: {type_};") +} + +pub fn func_input_to_ts(input: &types::FunctionInput) -> String { + let types::FunctionInput { name, value, .. } = input; + let type_ = type_to_ts(value); + format!("{name}: {type_}") +} + +pub fn func_input_to_arg_name(input: &types::FunctionInput) -> String { + let types::FunctionInput { name, .. } = input; + name.to_string() +} + +pub fn parse_arg_to_scval(input: &types::FunctionInput) -> String { + let types::FunctionInput { name, value, .. } = input; + match value { + types::Type::Address => format!("{name}: new Address({name})"), + _ => name.to_string(), + } +} + +pub fn type_to_ts(value: &types::Type) -> String { + match value { + types::Type::U64 => "u64".to_owned(), + types::Type::I64 => "i64".to_owned(), + types::Type::U128 => "u128".to_owned(), + types::Type::I128 => "i128".to_owned(), + types::Type::U32 => "u32".to_owned(), + types::Type::I32 => "i32".to_owned(), + types::Type::Bool => "boolean".to_owned(), + types::Type::Symbol | types::Type::String => "string".to_owned(), + types::Type::Map { key, value } => { + format!("Map<{}, {}>", type_to_ts(key), type_to_ts(value)) + } + types::Type::Option { value } => format!("Option<{}>", type_to_ts(value)), + types::Type::Result { value, .. } => { + format!("Result<{}>", type_to_ts(value)) + } + types::Type::Vec { element } => format!("Array<{}>", type_to_ts(element)), + types::Type::Tuple { elements } => { + if elements.is_empty() { + "void".to_owned() + } else { + format!("readonly [{}]", elements.iter().map(type_to_ts).join(", ")) + } + } + types::Type::Custom { name } => name.clone(), + // TODO: Figure out what js type to map this to. There is already an `Error_` one that + // ahalabs have added in the bindings, so.. maybe rename that? + types::Type::Val => "any".to_owned(), + types::Type::Error { .. } => "Error_".to_owned(), + types::Type::Address => "string".to_string(), + types::Type::Bytes | types::Type::BytesN { .. } => "Buffer".to_string(), + types::Type::Void => "void".to_owned(), + types::Type::U256 => "u256".to_string(), + types::Type::I256 => "i256".to_string(), + types::Type::Timepoint => "Timepoint".to_string(), + types::Type::Duration => "Duration".to_string(), + } +} diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/.gitignore b/cmd/crates/soroban-spec-typescript/src/project_template/.gitignore new file mode 100644 index 00000000..72aae85f --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +out/ diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/README.md b/cmd/crates/soroban-spec-typescript/src/project_template/README.md new file mode 100644 index 00000000..3995e52b --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/README.md @@ -0,0 +1,54 @@ +# INSERT_CONTRACT_NAME_HERE JS + +JS library for interacting with [Soroban](https://soroban.stellar.org/) smart contract `INSERT_CONTRACT_NAME_HERE` via Soroban RPC. 
+
+This library was automatically generated by Soroban CLI using a command similar to:
+
+```bash
+soroban contract bindings ts \
+  --rpc-url INSERT_RPC_URL_HERE \
+  --network-passphrase "INSERT_NETWORK_PASSPHRASE_HERE" \
+  --contract-id INSERT_CONTRACT_ID_HERE \
+  --output-dir ./path/to/INSERT_CONTRACT_NAME_HERE
+```
+
+The network passphrase and contract ID are exported from [index.ts](./src/index.ts) in the `networks` constant. If you are the one who generated this library and you know that this contract is also deployed to other networks, feel free to update `networks` with other valid options. This will help your contract consumers use this library more easily.
+
+# To publish or not to publish
+
+This library is suitable for publishing to NPM; you can publish it with `npm publish`.
+
+But you don't need to publish this library to NPM to use it. You can add it to your project's `package.json` using a file path:
+
+```json
+"dependencies": {
+  "INSERT_CONTRACT_NAME_HERE": "./path/to/this/folder"
+}
+```
+
+However, we've encountered [frustration](https://github.com/stellar/soroban-example-dapp/pull/117#discussion_r1232873560) using local libraries with NPM in this way. Though it seems a bit messy, we suggest generating the library directly into your `node_modules` folder automatically after each install by using a `postinstall` script. We've had the least trouble with this approach: NPM will automatically remove what it sees as erroneous directories during the `install` step, and then regenerate them when it gets to your `postinstall` step, which keeps the library up-to-date with your contract.
+
+```json
+"scripts": {
+  "postinstall": "soroban contract bindings ts --rpc-url INSERT_RPC_URL_HERE --network-passphrase \"INSERT_NETWORK_PASSPHRASE_HERE\" --id INSERT_CONTRACT_ID_HERE --name INSERT_CONTRACT_NAME_HERE"
+}
+```
+
+Adjust the above command to match the one you actually used to generate the library.
+
+# Use it
+
+Now that you have your library up-to-date and added to your project, you can import it in a file and see inline documentation for all of its exported methods:
+
+```js
+import { Contract, networks } from "INSERT_CONTRACT_NAME_HERE"
+
+const contract = new Contract({
+  ...networks.futurenet, // for example; check which networks this library exports
+  rpcUrl: '...', // use your own, or find one for testing at https://soroban.stellar.org/docs/reference/rpc#public-rpc-providers
+})
+
+contract.|
+```
+
+As long as your editor is configured to show JavaScript/TypeScript documentation, you can pause your typing at that `|` to get a list of all exports and inline documentation for each. The library exports a separate [async](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) function for each method in the smart contract, with documentation generated from the comments the contract's author included in the original source code.
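+
+For example, if the contract has a `hello` method that takes a `to` argument (a hypothetical method used here only for illustration; your contract's method names and arguments will differ), a call could look like the following sketch. Each generated method simulates the transaction first; state-changing calls can then be signed and submitted with the connected wallet (Freighter by default).
+
+```ts
+// Simulate the call and parse the return value from XDR
+const tx = await contract.hello({ to: "world" });
+console.log(tx.result); // result of the simulation
+
+// For a call that changes on-chain state, sign and submit it:
+const sent = await tx.signAndSend();
+console.log(sent.result); // final, on-chain result
+```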
diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/package.json b/cmd/crates/soroban-spec-typescript/src/project_template/package.json new file mode 100644 index 00000000..e32d0f5e --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/package.json @@ -0,0 +1,20 @@ +{ + "version": "0.0.0", + "name": "INSERT_CONTRACT_NAME_HERE", + "dependencies": { + "@stellar/freighter-api": "1.7.1", + "buffer": "6.0.3", + "@stellar/stellar-sdk": "11.2.0" + }, + "scripts": { + "build": "node ./scripts/build.mjs" + }, + "exports": { + "require": "./dist/cjs/index.js", + "import": "./dist/esm/index.js" + }, + "typings": "dist/types/index.d.ts", + "devDependencies": { + "typescript": "5.3.3" + } +} diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/scripts/build.mjs b/cmd/crates/soroban-spec-typescript/src/project_template/scripts/build.mjs new file mode 100644 index 00000000..15a17042 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/scripts/build.mjs @@ -0,0 +1,37 @@ +import { spawnSync } from "node:child_process" +import fs from "node:fs" +import path from "node:path" + +const buildDir = "./dist" + +const { error, stderr } = spawnSync("tsc", ["-b", "./scripts/tsconfig.cjs.json", "./scripts/tsconfig.esm.json", "./scripts/tsconfig.types.json"], { stdio: "inherit" }) + +if (error) { + console.error(stderr) + console.error(error) + throw error +} + +function createEsmModulePackageJson() { + fs.readdir(buildDir, function (err, dirs) { + if (err) { + throw err + } + dirs.forEach(function (dir) { + if (dir === "esm") { + // 1. add package.json file with "type": "module" + var packageJsonFile = path.join(buildDir, dir, "/package.json") + if (!fs.existsSync(packageJsonFile)) { + fs.writeFileSync( + packageJsonFile, + '{"type": "module"}', + 'utf8', + err => { if (err) throw err } + ) + } + } + }) + }) +} + +createEsmModulePackageJson() diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/scripts/tsconfig.cjs.json b/cmd/crates/soroban-spec-typescript/src/project_template/scripts/tsconfig.cjs.json new file mode 100644 index 00000000..542ea86d --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/scripts/tsconfig.cjs.json @@ -0,0 +1,7 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "../dist/cjs", + "module": "commonjs" + } +} diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/scripts/tsconfig.esm.json b/cmd/crates/soroban-spec-typescript/src/project_template/scripts/tsconfig.esm.json new file mode 100644 index 00000000..92b45277 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/scripts/tsconfig.esm.json @@ -0,0 +1,7 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "../dist/esm", + "module": "esnext" + } +} diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/scripts/tsconfig.types.json b/cmd/crates/soroban-spec-typescript/src/project_template/scripts/tsconfig.types.json new file mode 100644 index 00000000..8a24fc13 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/scripts/tsconfig.types.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "../dist/types", + "declaration": true, + "emitDeclarationOnly": true + } +} diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/src/assembled-tx.ts b/cmd/crates/soroban-spec-typescript/src/project_template/src/assembled-tx.ts new file mode 100644 index 00000000..f4f892d4 --- 
/dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/src/assembled-tx.ts @@ -0,0 +1,664 @@ +import { + Account, + Address, + Contract, + Operation, + SorobanRpc, + StrKey, + TimeoutInfinite, + TransactionBuilder, + authorizeEntry, + hash, + nativeToScVal, + xdr, + BASE_FEE, +} from "@stellar/stellar-sdk"; +import type { Memo, MemoType, Transaction } from "@stellar/stellar-sdk"; +import { Buffer } from "buffer"; +import type { + ClassOptions, + MethodOptions, + Wallet, + XDR_BASE64, +} from "./method-options.js"; + +export type Tx = Transaction, Operation[]> + +export class ExpiredStateError extends Error { } +export class NeedsMoreSignaturesError extends Error { } +export class WalletDisconnectedError extends Error { } +export class SendResultOnlyError extends Error { } +export class SendFailedError extends Error { } +export class NoUnsignedNonInvokerAuthEntriesError extends Error { } + +type SendTx = SorobanRpc.Api.SendTransactionResponse; +type GetTx = SorobanRpc.Api.GetTransactionResponse; + +export type u32 = number; +export type i32 = number; +export type u64 = bigint; +export type i64 = bigint; +export type u128 = bigint; +export type i128 = bigint; +export type u256 = bigint; +export type i256 = bigint; +export type Option = T | undefined; +export type Typepoint = bigint; +export type Duration = bigint; +export {Address}; + +/// Error interface containing the error message +export interface Error_ { message: string }; + +export interface Result { + unwrap(): T, + unwrapErr(): E, + isOk(): boolean, + isErr(): boolean, +}; + +export class Ok implements Result { + constructor(readonly value: T) { } + unwrapErr(): E { + throw new Error('No error'); + } + unwrap(): T { + return this.value; + } + + isOk(): boolean { + return true; + } + + isErr(): boolean { + return !this.isOk() + } +} + +export class Err implements Result { + constructor(readonly error: E) { } + unwrapErr(): E { + return this.error; + } + unwrap(): never { + throw new Error(this.error.message); + } + + isOk(): boolean { + return false; + } + + isErr(): boolean { + return !this.isOk() + } +} + +export const contractErrorPattern = /Error\(Contract, #(\d+)\)/; + +type AssembledTransactionOptions = MethodOptions & + ClassOptions & { + method: string; + args?: any[]; + parseResultXdr: (xdr: string | xdr.ScVal | Err) => T; + }; + +export const NULL_ACCOUNT = "GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWHF" + +export class AssembledTransaction { + public raw: Tx + private simulation?: SorobanRpc.Api.SimulateTransactionResponse + private simulationResult?: SorobanRpc.Api.SimulateHostFunctionResult + private simulationTransactionData?: xdr.SorobanTransactionData + private server: SorobanRpc.Server + + toJSON() { + return JSON.stringify({ + method: this.options.method, + tx: this.raw?.toXDR(), + simulationResult: { + auth: this.simulationData.result.auth.map(a => a.toXDR('base64')), + retval: this.simulationData.result.retval.toXDR('base64'), + }, + simulationTransactionData: this.simulationData.transactionData.toXDR('base64'), + }) + } + + static fromJSON( + options: Omit, 'args'>, + { tx, simulationResult, simulationTransactionData }: + { + tx: XDR_BASE64, + simulationResult: { + auth: XDR_BASE64[], + retval: XDR_BASE64, + }, + simulationTransactionData: XDR_BASE64, + } + ): AssembledTransaction { + const txn = new AssembledTransaction(options) + txn.raw = TransactionBuilder.fromXDR(tx, options.networkPassphrase) as Tx + txn.simulationResult = { + auth: simulationResult.auth.map(a => 
xdr.SorobanAuthorizationEntry.fromXDR(a, 'base64')), + retval: xdr.ScVal.fromXDR(simulationResult.retval, 'base64'), + } + txn.simulationTransactionData = xdr.SorobanTransactionData.fromXDR(simulationTransactionData, 'base64') + return txn + } + + private constructor(public options: AssembledTransactionOptions) { + this.server = new SorobanRpc.Server(this.options.rpcUrl, { + allowHttp: this.options.rpcUrl.startsWith("http://"), + }); + } + + static async fromSimulation(options: AssembledTransactionOptions): Promise> { + const tx = new AssembledTransaction(options) + const contract = new Contract(options.contractId); + + tx.raw = new TransactionBuilder(await tx.getAccount(), { + fee: options.fee?.toString(10) ?? BASE_FEE, + networkPassphrase: options.networkPassphrase, + }) + .addOperation(contract.call(options.method, ...(options.args ?? []))) + .setTimeout(TimeoutInfinite) + .build(); + + return await tx.simulate() + } + + simulate = async (): Promise => { + if (!this.raw) throw new Error('Transaction has not yet been assembled') + this.simulation = await this.server.simulateTransaction(this.raw); + + if (SorobanRpc.Api.isSimulationSuccess(this.simulation)) { + this.raw = SorobanRpc.assembleTransaction( + this.raw, + this.simulation + ).build() + } + + return this + } + + get simulationData(): { + result: SorobanRpc.Api.SimulateHostFunctionResult + transactionData: xdr.SorobanTransactionData + } { + if (this.simulationResult && this.simulationTransactionData) { + return { + result: this.simulationResult, + transactionData: this.simulationTransactionData, + } + } + // else, we know we just did the simulation on this machine + const simulation = this.simulation! + if (SorobanRpc.Api.isSimulationError(simulation)) { + throw new Error(`Transaction simulation failed: "${simulation.error}"`) + } + + if (SorobanRpc.Api.isSimulationRestore(simulation)) { + throw new ExpiredStateError(`You need to restore some contract state before you can invoke this method. ${JSON.stringify(simulation, null, 2)}`) + } + + if (!simulation.result) { + throw new Error(`Expected an invocation simulation, but got no 'result' field. Simulation: ${JSON.stringify(simulation, null, 2)}`) + } + + // add to object for serialization & deserialization + this.simulationResult = simulation.result + this.simulationTransactionData = simulation.transactionData.build() + + return { + result: this.simulationResult, + transactionData: this.simulationTransactionData!, + } + } + + get result(): T { + try { + return this.options.parseResultXdr(this.simulationData.result.retval) + } catch (e) { + let err = this.parseError(e.toString()) + if (err) return err as T + throw e + } + } + + parseError(errorMessage: string): Err | undefined { + if (!this.options.errorTypes) return + const match = errorMessage.match(contractErrorPattern) + if (!match) return + let i = parseInt(match[1], 10) + let err = this.options.errorTypes[i] + if (err) return new Err(err) + } + + getWallet = async (): Promise => { + return this.options.wallet ?? (await import("@stellar/freighter-api")).default + } + + getPublicKey = async (): Promise => { + const wallet = await this.getWallet() + if (await wallet.isConnected() && await wallet.isAllowed()) { + return (await wallet.getUserInfo()).publicKey + } + } + + /** + * Get account details from the Soroban network for the publicKey currently + * selected in user's wallet. If not connected to Freighter, use placeholder + * null account. 
+ */ + getAccount = async (): Promise => { + const publicKey = await this.getPublicKey() + return publicKey + ? await this.server.getAccount(publicKey) + : new Account(NULL_ACCOUNT, "0") + } + + /** + * Sign the transaction with the `wallet` (default Freighter), then send to + * the network and return a `SentTransaction` that keeps track of all the + * attempts to send and fetch the transaction from the network. + */ + signAndSend = async ({ secondsToWait = 10, force = false }: { + /** + * Wait `secondsToWait` seconds (default: 10) for both the transaction to SEND successfully (will keep trying if the server returns `TRY_AGAIN_LATER`), as well as for the transaction to COMPLETE (will keep checking if the server returns `PENDING`). + */ + secondsToWait?: number + /** + * If `true`, sign and send the transaction even if it is a read call. + */ + force?: boolean + } = {}): Promise> => { + if (!this.raw) { + throw new Error('Transaction has not yet been simulated') + } + + if (!force && this.isReadCall) { + throw new Error('This is a read call. It requires no signature or sending. Use `force: true` to sign and send anyway.') + } + + if (!await this.hasRealInvoker()) { + throw new WalletDisconnectedError('Wallet is not connected') + } + + if (this.raw.source !== (await this.getAccount()).accountId()) { + throw new Error(`You must submit the transaction with the account that originally created it. Please switch to the wallet with "${this.raw.source}" as its public key.`) + } + + if ((await this.needsNonInvokerSigningBy()).length) { + throw new NeedsMoreSignaturesError( + 'Transaction requires more signatures. See `needsNonInvokerSigningBy` for details.' + ) + } + + return await SentTransaction.init(this.options, this, secondsToWait); + } + + getStorageExpiration = async () => { + const entryRes = await this.server.getLedgerEntries( + new Contract(this.options.contractId).getFootprint() + ) + if ( + !entryRes.entries || + !entryRes.entries.length || + !entryRes.entries[0].liveUntilLedgerSeq + ) throw new Error('failed to get ledger entry') + return entryRes.entries[0].liveUntilLedgerSeq + } + + /** + * Get a list of accounts, other than the invoker of the simulation, that + * need to sign auth entries in this transaction. + * + * Soroban allows multiple people to sign a transaction. Someone needs to + * sign the final transaction envelope; this person/account is called the + * _invoker_, or _source_. Other accounts might need to sign individual auth + * entries in the transaction, if they're not also the invoker. + * + * This function returns a list of accounts that need to sign auth entries, + * assuming that the same invoker/source account will sign the final + * transaction envelope as signed the initial simulation. + * + * One at a time, for each public key in this array, you will need to + * serialize this transaction with `toJSON`, send to the owner of that key, + * deserialize the transaction with `txFromJson`, and call + * {@link signAuthEntries}. Then re-serialize and send to the next account + * in this list. + */ + needsNonInvokerSigningBy = async ({ + includeAlreadySigned = false, + }: { + /** + * Whether or not to include auth entries that have already been signed. Default: false + */ + includeAlreadySigned?: boolean + } = {}): Promise => { + if (!this.raw) { + throw new Error('Transaction has not yet been simulated') + } + + // We expect that any transaction constructed by these libraries has a + // single operation, which is an InvokeHostFunction operation. 
The host + // function being invoked is the contract method call. + if (!("operations" in this.raw)) { + throw new Error( + `Unexpected Transaction type; no operations: ${JSON.stringify(this.raw) + }` + ) + } + const rawInvokeHostFunctionOp = this.raw + .operations[0] as Operation.InvokeHostFunction + + return [...new Set((rawInvokeHostFunctionOp.auth ?? []).filter(entry => + entry.credentials().switch() === + xdr.SorobanCredentialsType.sorobanCredentialsAddress() && + ( + includeAlreadySigned || + entry.credentials().address().signature().switch().name === 'scvVoid' + ) + ).map(entry => StrKey.encodeEd25519PublicKey( + entry.credentials().address().address().accountId().ed25519() + )))] + } + + preImageFor( + entry: xdr.SorobanAuthorizationEntry, + signatureExpirationLedger: number + ): xdr.HashIdPreimage { + const addrAuth = entry.credentials().address() + return xdr.HashIdPreimage.envelopeTypeSorobanAuthorization( + new xdr.HashIdPreimageSorobanAuthorization({ + networkId: hash(Buffer.from(this.options.networkPassphrase)), + nonce: addrAuth.nonce(), + invocation: entry.rootInvocation(), + signatureExpirationLedger, + }), + ) + } + + /** + * If {@link needsNonInvokerSigningBy} returns a non-empty list, you can serialize + * the transaction with `toJSON`, send it to the owner of one of the public keys + * in the map, deserialize with `txFromJSON`, and call this method on their + * machine. Internally, this will use `signAuthEntry` function from connected + * `wallet` for each. + * + * Then, re-serialize the transaction and either send to the next + * `needsNonInvokerSigningBy` owner, or send it back to the original account + * who simulated the transaction so they can {@link sign} the transaction + * envelope and {@link send} it to the network. + * + * Sending to all `needsNonInvokerSigningBy` owners in parallel is not currently + * supported! + */ + signAuthEntries = async ( + /** + * When to set each auth entry to expire. Could be any number of blocks in + * the future. Can be supplied as a promise or a raw number. Default: + * contract's current `persistent` storage expiration date/ledger + * number/block. + */ + expiration: number | Promise = this.getStorageExpiration() + ): Promise => { + if (!this.raw) throw new Error('Transaction has not yet been assembled or simulated') + const needsNonInvokerSigningBy = await this.needsNonInvokerSigningBy() + + if (!needsNonInvokerSigningBy) throw new NoUnsignedNonInvokerAuthEntriesError('No unsigned non-invoker auth entries; maybe you already signed?') + const publicKey = await this.getPublicKey() + if (!publicKey) throw new Error('Could not get public key from wallet; maybe Freighter is not signed in?') + if (needsNonInvokerSigningBy.indexOf(publicKey) === -1) throw new Error(`No auth entries for public key "${publicKey}"`) + const wallet = await this.getWallet() + + const rawInvokeHostFunctionOp = this.raw + .operations[0] as Operation.InvokeHostFunction + + const authEntries = rawInvokeHostFunctionOp.auth ?? 
[] + + for (const [i, entry] of authEntries.entries()) { + if ( + entry.credentials().switch() !== + xdr.SorobanCredentialsType.sorobanCredentialsAddress() + ) { + // if the invoker/source account, then the entry doesn't need explicit + // signature, since the tx envelope is already signed by the source + // account, so only check for sorobanCredentialsAddress + continue + } + const pk = StrKey.encodeEd25519PublicKey( + entry.credentials().address().address().accountId().ed25519() + ) + + // this auth entry needs to be signed by a different account + // (or maybe already was!) + if (pk !== publicKey) continue + + authEntries[i] = await authorizeEntry( + entry, + async preimage => Buffer.from( + await wallet.signAuthEntry(preimage.toXDR('base64')), + 'base64' + ), + await expiration, + this.options.networkPassphrase + ) + } + } + + get isReadCall(): boolean { + const authsCount = this.simulationData.result.auth.length; + const writeLength = this.simulationData.transactionData.resources().footprint().readWrite().length + return (authsCount === 0) && (writeLength === 0); + } + + hasRealInvoker = async (): Promise => { + const account = await this.getAccount() + return account.accountId() !== NULL_ACCOUNT + } +} + +/** + * A transaction that has been sent to the Soroban network. This happens in two steps: + * + * 1. `sendTransaction`: initial submission of the transaction to the network. + * This step can run into problems, and will be retried with exponential + * backoff if it does. See all attempts in `sendTransactionResponseAll` and the + * most recent attempt in `sendTransactionResponse`. + * 2. `getTransaction`: once the transaction has been submitted to the network + * successfully, you need to wait for it to finalize to get the results of the + * transaction. This step can also run into problems, and will be retried with + * exponential backoff if it does. See all attempts in + * `getTransactionResponseAll` and the most recent attempt in + * `getTransactionResponse`. + */ +class SentTransaction { + public server: SorobanRpc.Server + public signed: Tx + public sendTransactionResponse?: SendTx + public sendTransactionResponseAll?: SendTx[] + public getTransactionResponse?: GetTx + public getTransactionResponseAll?: GetTx[] + + constructor(public options: AssembledTransactionOptions, public assembled: AssembledTransaction) { + this.server = new SorobanRpc.Server(this.options.rpcUrl, { + allowHttp: this.options.rpcUrl.startsWith("http://"), + }); + this.assembled = assembled + } + + static init = async ( + options: AssembledTransactionOptions, + assembled: AssembledTransaction, + secondsToWait: number = 10 + ): Promise> => { + const tx = new SentTransaction(options, assembled) + return await tx.send(secondsToWait) + } + + private send = async (secondsToWait: number = 10): Promise => { + const wallet = await this.assembled.getWallet() + + this.sendTransactionResponseAll = await withExponentialBackoff( + async (previousFailure) => { + if (previousFailure) { + // Increment transaction sequence number and resimulate before trying again + + // Soroban transaction can only have 1 operation + const op = this.assembled.raw.operations[0] as Operation.InvokeHostFunction; + + this.assembled.raw = new TransactionBuilder(await this.assembled.getAccount(), { + fee: this.assembled.raw.fee, + networkPassphrase: this.options.networkPassphrase, + }) + .setTimeout(TimeoutInfinite) + .addOperation( + Operation.invokeHostFunction({ ...op, auth: op.auth ?? 
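+ *
+ * A rough usage sketch: a `SentTransaction` is normally obtained from
+ * `AssembledTransaction.signAndSend()` rather than constructed directly
+ * (`someMethod` is a hypothetical generated contract method).
+ *
+ * @example
+ * ```ts
+ * const assembled = await contract.someMethod({ to: "world" });
+ * const sent = await assembled.signAndSend();
+ * console.log(sent.sendTransactionResponse); // most recent send attempt
+ * console.log(sent.getTransactionResponse);  // most recent fetch of the result
+ * console.log(sent.result);                  // parsed return value, if any
+ * ```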
[] }), + ) + .build() + + await this.assembled.simulate() + } + + const signature = await wallet.signTransaction(this.assembled.raw.toXDR(), { + networkPassphrase: this.options.networkPassphrase, + }); + + this.signed = TransactionBuilder.fromXDR( + signature, + this.options.networkPassphrase + ) as Tx + + return this.server.sendTransaction(this.signed) + }, + resp => resp.status !== "PENDING", + secondsToWait + ) + + this.sendTransactionResponse = this.sendTransactionResponseAll[this.sendTransactionResponseAll.length - 1] + + if (this.sendTransactionResponse.status !== "PENDING") { + throw new Error( + `Tried to resubmit transaction for ${secondsToWait + } seconds, but it's still failing. ` + + `All attempts: ${JSON.stringify( + this.sendTransactionResponseAll, + null, + 2 + )}` + ); + } + + const { hash } = this.sendTransactionResponse + + this.getTransactionResponseAll = await withExponentialBackoff( + () => this.server.getTransaction(hash), + resp => resp.status === SorobanRpc.Api.GetTransactionStatus.NOT_FOUND, + secondsToWait + ) + + this.getTransactionResponse = this.getTransactionResponseAll[this.getTransactionResponseAll.length - 1] + if (this.getTransactionResponse.status === SorobanRpc.Api.GetTransactionStatus.NOT_FOUND) { + console.error( + `Waited ${secondsToWait + } seconds for transaction to complete, but it did not. ` + + `Returning anyway. Check the transaction status manually. ` + + `Sent transaction: ${JSON.stringify( + this.sendTransactionResponse, + null, + 2 + )}\n` + + `All attempts to get the result: ${JSON.stringify( + this.getTransactionResponseAll, + null, + 2 + )}` + ); + } + + return this; + } + + get result(): T { + // 1. check if transaction was submitted and awaited with `getTransaction` + if ( + "getTransactionResponse" in this && + this.getTransactionResponse + ) { + // getTransactionResponse has a `returnValue` field unless it failed + if ("returnValue" in this.getTransactionResponse) { + return this.options.parseResultXdr(this.getTransactionResponse.returnValue!) + } + + // if "returnValue" not present, the transaction failed; return without parsing the result + throw new Error("Transaction failed! Cannot parse result.") + } + + // 2. otherwise, maybe it was merely sent with `sendTransaction` + if (this.sendTransactionResponse) { + const errorResult = this.sendTransactionResponse.errorResult?.result() + if (errorResult) { + throw new SendFailedError( + `Transaction simulation looked correct, but attempting to send the transaction failed. Check \`simulation\` and \`sendTransactionResponseAll\` to troubleshoot. Decoded \`sendTransactionResponse.errorResultXdr\`: ${errorResult}` + ) + } + throw new SendResultOnlyError( + `Transaction was sent to the network, but not yet awaited. No result to show. Await transaction completion with \`getTransaction(sendTransactionResponse.hash)\`` + ) + } + + // 3. finally, if neither of those are present, throw an error + throw new Error(`Sending transaction failed: ${JSON.stringify(this.assembled)}`) + } +} + +/** + * Keep calling a `fn` for `secondsToWait` seconds, if `keepWaitingIf` is true. + * Returns an array of all attempts to call the function. 
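+ *
+ * A minimal usage sketch, assuming a hypothetical `checkStatus` helper that
+ * resolves to an object with a `status` field:
+ *
+ * @example
+ * ```ts
+ * const attempts = await withExponentialBackoff(
+ *   () => checkStatus(),                  // call to retry
+ *   (resp) => resp.status === "PENDING",  // keep retrying while still pending
+ *   10,                                   // give up after roughly 10 seconds
+ * );
+ * const finalResponse = attempts[attempts.length - 1];
+ * ```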
+ */ +async function withExponentialBackoff( + fn: (previousFailure?: T) => Promise, + keepWaitingIf: (result: T) => boolean, + secondsToWait: number, + exponentialFactor = 1.5, + verbose = false, +): Promise { + const attempts: T[] = [] + + let count = 0 + attempts.push(await fn()) + if (!keepWaitingIf(attempts[attempts.length - 1])) return attempts + + const waitUntil = new Date(Date.now() + secondsToWait * 1000).valueOf() + let waitTime = 1000 + let totalWaitTime = waitTime + + while (Date.now() < waitUntil && keepWaitingIf(attempts[attempts.length - 1])) { + count++ + // Wait a beat + if (verbose) { + console.info(`Waiting ${waitTime}ms before trying again (bringing the total wait time to ${totalWaitTime}ms so far, of total ${secondsToWait * 1000}ms)`) + } + await new Promise(res => setTimeout(res, waitTime)) + // Exponential backoff + waitTime = waitTime * exponentialFactor; + if (new Date(Date.now() + waitTime).valueOf() > waitUntil) { + waitTime = waitUntil - Date.now() + if (verbose) { + console.info(`was gonna wait too long; new waitTime: ${waitTime}ms`) + } + } + totalWaitTime = waitTime + totalWaitTime + // Try again + attempts.push(await fn(attempts[attempts.length - 1])) + if (verbose && keepWaitingIf(attempts[attempts.length - 1])) { + console.info( + `${count}. Called ${fn}; ${attempts.length + } prev attempts. Most recent: ${JSON.stringify(attempts[attempts.length - 1], null, 2) + }` + ) + } + } + + return attempts +} diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/src/index.ts b/cmd/crates/soroban-spec-typescript/src/project_template/src/index.ts new file mode 100644 index 00000000..f7ad0b66 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/src/index.ts @@ -0,0 +1,27 @@ +import { ContractSpec, Address } from '@stellar/stellar-sdk'; +import { Buffer } from "buffer"; +import { AssembledTransaction, Ok, Err } from './assembled-tx.js'; +import type { + u32, + i32, + u64, + i64, + u128, + i128, + u256, + i256, + Option, + Typepoint, + Duration, + Error_, + Result, +} from './assembled-tx.js'; +import type { ClassOptions, XDR_BASE64 } from './method-options.js'; + +export * from './assembled-tx.js'; +export * from './method-options.js'; + +if (typeof window !== 'undefined') { + //@ts-ignore Buffer exists + window.Buffer = window.Buffer || Buffer; +} diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/src/method-options.ts b/cmd/crates/soroban-spec-typescript/src/project_template/src/method-options.ts new file mode 100644 index 00000000..d1ff142f --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/src/method-options.ts @@ -0,0 +1,50 @@ +// defined this way so typeahead shows full union, not named alias +let responseTypes: 'simulated' | 'full' | undefined +export type ResponseTypes = typeof responseTypes + +export type XDR_BASE64 = string + +export interface Wallet { + isConnected: () => Promise, + isAllowed: () => Promise, + getUserInfo: () => Promise<{ publicKey?: string }>, + signTransaction: (tx: XDR_BASE64, opts?: { + network?: string, + networkPassphrase?: string, + accountToSign?: string, + }) => Promise, + signAuthEntry: ( + entryXdr: XDR_BASE64, + opts?: { + accountToSign?: string; + } + ) => Promise +} + +export type ClassOptions = { + contractId: string + networkPassphrase: string + rpcUrl: string + errorTypes?: Record + /** + * A Wallet interface, such as Freighter, that has the methods `isConnected`, `isAllowed`, `getUserInfo`, and `signTransaction`. 
If not provided, will attempt to import and use Freighter. Example: + * + * @example + * ```ts + * import freighter from "@stellar/freighter-api"; + * import { Contract } from "INSERT_CONTRACT_NAME_HERE"; + * const contract = new Contract({ + * …, + * wallet: freighter, + * }) + * ``` + */ + wallet?: Wallet +} + +export type MethodOptions = { + /** + * The fee to pay for the transaction. Default: soroban-sdk's BASE_FEE ('100') + */ + fee?: number +} diff --git a/cmd/crates/soroban-spec-typescript/src/project_template/tsconfig.json b/cmd/crates/soroban-spec-typescript/src/project_template/tsconfig.json new file mode 100644 index 00000000..efd4c619 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/project_template/tsconfig.json @@ -0,0 +1,98 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + /* Language and Environment */ + "target": "ESNext", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + /* Modules */ + "module": "ESNext", /* Specify what module code is generated. */ + // "rootDir": "./", /* Specify the root folder within your source files. */ + "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. 
*/ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + /* Emit */ + "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. 
*/ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + // "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + // "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + /* Type Checking */ + // "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ + }, + "include": [ + "src/*" + ] +} diff --git a/cmd/crates/soroban-spec-typescript/src/types.rs b/cmd/crates/soroban-spec-typescript/src/types.rs new file mode 100644 index 00000000..02511843 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/types.rs @@ -0,0 +1,259 @@ +use serde::Serialize; +use stellar_xdr::curr::{ + ScSpecEntry, ScSpecFunctionInputV0, ScSpecTypeDef, ScSpecUdtEnumCaseV0, + ScSpecUdtErrorEnumCaseV0, ScSpecUdtStructFieldV0, ScSpecUdtStructV0, ScSpecUdtUnionCaseV0, +}; + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct StructField { + pub doc: String, + pub name: String, + pub value: Type, +} + +impl From<&ScSpecUdtStructFieldV0> for StructField { + fn from(f: &ScSpecUdtStructFieldV0) -> Self { + StructField { + doc: f.doc.to_utf8_string_lossy(), + name: f.name.to_utf8_string_lossy(), + value: (&f.type_).into(), + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FunctionInput { + pub doc: String, + pub name: String, + pub value: Type, +} + +impl From<&ScSpecFunctionInputV0> for FunctionInput { + fn from(f: &ScSpecFunctionInputV0) -> Self { + FunctionInput { + doc: f.doc.to_utf8_string_lossy(), + name: f.name.to_utf8_string_lossy(), + value: (&f.type_).into(), + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct UnionCase { + pub doc: String, + pub name: String, + pub values: Vec, +} + +impl From<&ScSpecUdtUnionCaseV0> for UnionCase { + fn from(c: &ScSpecUdtUnionCaseV0) -> Self { + let (doc, name, values) = match c { + ScSpecUdtUnionCaseV0::VoidV0(v) => ( + v.doc.to_utf8_string_lossy(), + v.name.to_utf8_string_lossy(), + vec![], + ), + ScSpecUdtUnionCaseV0::TupleV0(t) => ( + t.doc.to_utf8_string_lossy(), + t.name.to_utf8_string_lossy(), + t.type_.iter().map(Type::from).collect(), + ), + }; + UnionCase { doc, name, values } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct EnumCase { + pub doc: String, + pub name: String, + pub value: u32, +} + +impl From<&ScSpecUdtEnumCaseV0> for EnumCase { + fn from(c: &ScSpecUdtEnumCaseV0) -> Self { + EnumCase { + doc: c.doc.to_utf8_string_lossy(), + name: c.name.to_utf8_string_lossy(), + value: c.value, + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ErrorEnumCase { + pub doc: String, + pub name: String, + pub value: u32, +} + +impl From<&ScSpecUdtErrorEnumCaseV0> for ErrorEnumCase { + fn from(c: &ScSpecUdtErrorEnumCaseV0) -> Self { + ErrorEnumCase { + doc: c.doc.to_utf8_string_lossy(), + name: c.name.to_utf8_string_lossy(), + value: c.value, + } + } +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(tag = "type")] +#[serde(rename_all = "camelCase")] +pub enum Type { + Void, + Val, + U64, + I64, + U32, + I32, + U128, + I128, + U256, + I256, + Bool, + Symbol, + Bytes, + String, + Address, + Timepoint, + Duration, + Map { key: Box, value: Box }, + Option { value: Box }, + Result { value: Box, error: Box }, + Vec { element: Box }, + BytesN { n: u32 }, + Tuple { elements: Vec }, + Error { message: Option }, + Custom { name: String }, +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize)] +#[serde(tag = "type")] +#[serde(rename_all = "camelCase")] +pub enum Entry { + 
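+    // One variant per contract spec entry: functions plus user-defined struct,
+    // tuple-struct, union, enum, and error-enum types. Note that `UdtStructV0`
+    // entries whose first field is named "0" are mapped to `TupleStruct`
+    // (see `is_tuple_strukt` below).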
Function { + doc: String, + name: String, + inputs: Vec, + outputs: Vec, + }, + Struct { + doc: String, + name: String, + fields: Vec, + }, + TupleStruct { + doc: String, + name: String, + fields: Vec, + }, + Union { + doc: String, + name: String, + cases: Vec, + }, + Enum { + doc: String, + name: String, + cases: Vec, + }, + ErrorEnum { + doc: String, + name: String, + cases: Vec, + }, +} + +impl From<&ScSpecTypeDef> for Type { + fn from(spec: &ScSpecTypeDef) -> Self { + match spec { + ScSpecTypeDef::Map(map) => Type::Map { + key: Box::new(Type::from(map.key_type.as_ref())), + value: Box::new(Type::from(map.value_type.as_ref())), + }, + ScSpecTypeDef::Option(opt) => Type::Option { + value: Box::new(Type::from(opt.value_type.as_ref())), + }, + ScSpecTypeDef::Result(res) => Type::Result { + value: Box::new(Type::from(res.ok_type.as_ref())), + error: Box::new(Type::from(res.error_type.as_ref())), + }, + ScSpecTypeDef::Tuple(tuple) => Type::Tuple { + elements: tuple.value_types.iter().map(Type::from).collect(), + }, + ScSpecTypeDef::Vec(vec) => Type::Vec { + element: Box::new(Type::from(vec.element_type.as_ref())), + }, + ScSpecTypeDef::Udt(udt) => Type::Custom { + name: udt.name.to_utf8_string_lossy(), + }, + ScSpecTypeDef::BytesN(b) => Type::BytesN { n: b.n }, + ScSpecTypeDef::Val => Type::Val, + ScSpecTypeDef::U64 => Type::U64, + ScSpecTypeDef::I64 => Type::I64, + ScSpecTypeDef::U32 => Type::U32, + ScSpecTypeDef::I32 => Type::I32, + ScSpecTypeDef::U128 => Type::U128, + ScSpecTypeDef::I128 => Type::I128, + ScSpecTypeDef::U256 => Type::U256, + ScSpecTypeDef::I256 => Type::I256, + ScSpecTypeDef::Bool => Type::Bool, + ScSpecTypeDef::Symbol => Type::Symbol, + ScSpecTypeDef::Error => Type::Error { message: None }, + ScSpecTypeDef::Bytes => Type::Bytes, + ScSpecTypeDef::String => Type::String, + ScSpecTypeDef::Address => Type::Address, + ScSpecTypeDef::Void => Type::Void, + ScSpecTypeDef::Timepoint => Type::Timepoint, + ScSpecTypeDef::Duration => Type::Duration, + } + } +} + +impl From<&ScSpecEntry> for Entry { + fn from(spec: &ScSpecEntry) -> Self { + match spec { + ScSpecEntry::FunctionV0(f) => Entry::Function { + doc: f.doc.to_utf8_string_lossy(), + name: f.name.to_utf8_string_lossy(), + inputs: f.inputs.iter().map(Into::into).collect(), + outputs: f.outputs.iter().map(Into::into).collect(), + }, + ScSpecEntry::UdtStructV0(s) if is_tuple_strukt(s) => Entry::TupleStruct { + doc: s.doc.to_utf8_string_lossy(), + name: s.name.to_utf8_string_lossy(), + fields: s.fields.iter().map(|f| &f.type_).map(Into::into).collect(), + }, + ScSpecEntry::UdtStructV0(s) => Entry::Struct { + doc: s.doc.to_utf8_string_lossy(), + name: s.name.to_utf8_string_lossy(), + fields: s.fields.iter().map(Into::into).collect(), + }, + ScSpecEntry::UdtUnionV0(u) => Entry::Union { + doc: u.doc.to_utf8_string_lossy(), + name: u.name.to_utf8_string_lossy(), + cases: u.cases.iter().map(Into::into).collect(), + }, + ScSpecEntry::UdtEnumV0(e) => Entry::Enum { + doc: e.doc.to_utf8_string_lossy(), + name: e.name.to_utf8_string_lossy(), + cases: e.cases.iter().map(Into::into).collect(), + }, + ScSpecEntry::UdtErrorEnumV0(e) => Entry::ErrorEnum { + doc: e.doc.to_utf8_string_lossy(), + name: e.name.to_utf8_string_lossy(), + cases: e.cases.iter().map(Into::into).collect(), + }, + } + } +} + +fn is_tuple_strukt(s: &ScSpecUdtStructV0) -> bool { + !s.fields.is_empty() && s.fields[0].name.to_utf8_string_lossy() == "0" +} diff --git a/cmd/crates/soroban-spec-typescript/src/wrapper.rs b/cmd/crates/soroban-spec-typescript/src/wrapper.rs new file 
mode 100644 index 00000000..d2a8d60a --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/src/wrapper.rs @@ -0,0 +1,55 @@ +use itertools::Itertools; + +use crate::types; + +pub fn type_to_js_xdr(value: &types::Type) -> String { + match value { + types::Type::Val => todo!(), + types::Type::U64 => "xdr.ScVal.scvU64(xdr.Uint64.fromString(i.toString()))".to_string(), + types::Type::I64 => "xdr.ScVal.scvI64(xdr.Int64.fromString(i.toString()))".to_string(), + types::Type::U32 => "xdr.ScVal.scvU32(i)".to_string(), + types::Type::I32 => "xdr.ScVal.scvI32(i)".to_string(), + types::Type::Bool => "xdr.ScVal.scvBool(i)".to_string(), + types::Type::Symbol => "xdr.ScVal.scvSymbol(i)".to_string(), + types::Type::Map { key, value } => format!( + "xdr.ScVal.scvMap(Array.from(i.entries()).map(([key, value]) => {{ + return new xdr.ScMapEntry({{ + key: ((i)=>{})(key), + val: ((i)=>{})(value)}}) + }}))", + type_to_js_xdr(key), + type_to_js_xdr(value) + ), + types::Type::Option { value } => format!( + "(!i) ? {} : {}", + type_to_js_xdr(&types::Type::Void), + type_to_js_xdr(value) + ), + types::Type::Result { value, .. } => type_to_js_xdr(value), + types::Type::Vec { element } => { + format!("xdr.ScVal.scvVec(i.map((i)=>{}))", type_to_js_xdr(element)) + } + types::Type::Tuple { elements } => { + let cases = elements + .iter() + .enumerate() + .map(|(i, e)| format!("((i) => {})(i[{i}])", type_to_js_xdr(e))) + .join(",\n "); + format!("xdr.ScVal.scvVec([{cases}])") + } + + types::Type::Custom { name } => format!("{name}ToXdr(i)"), + types::Type::BytesN { .. } | types::Type::Bytes => "xdr.ScVal.scvBytes(i)".to_owned(), + types::Type::Address => "addressToScVal(i)".to_owned(), + types::Type::Void => "xdr.ScVal.scvVoid()".to_owned(), + types::Type::U128 => "u128ToScVal(i)".to_owned(), + types::Type::I128 => "i128ToScVal(i)".to_owned(), + + types::Type::U256 | types::Type::I256 | types::Type::Timepoint | types::Type::Duration => { + "i".to_owned() + } + // This is case shouldn't happen since we only go xdr -> js for errors + types::Type::Error { .. 
} => "N/A".to_owned(), + types::Type::String => "xdr.ScVal.scvString(i)".to_owned(), + } +} diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/.env b/cmd/crates/soroban-spec-typescript/ts-tests/.env new file mode 100644 index 00000000..39b24548 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/.env @@ -0,0 +1,2 @@ +SOROBAN_NETWORK_PASSPHRASE="Standalone Network ; February 2017" +SOROBAN_RPC_URL="http://localhost:8000/soroban/rpc" diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/.eslintrc.cjs b/cmd/crates/soroban-spec-typescript/ts-tests/.eslintrc.cjs new file mode 100644 index 00000000..90af706b --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/.eslintrc.cjs @@ -0,0 +1,11 @@ +/* eslint-env node */ +module.exports = { + extends: ['eslint:recommended', 'plugin:@typescript-eslint/recommended'], + parser: '@typescript-eslint/parser', + parserOptions: { project: './tsconfig.json' }, + plugins: ['@typescript-eslint'], + root: true, + rules: { + "@typescript-eslint/no-floating-promises": ["error"] + }, +}; diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/.gitignore b/cmd/crates/soroban-spec-typescript/ts-tests/.gitignore new file mode 100644 index 00000000..878823de --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/.gitignore @@ -0,0 +1,4 @@ +build +node_modules +yarn.lock +contract-*.txt diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/initialize.sh b/cmd/crates/soroban-spec-typescript/ts-tests/initialize.sh new file mode 100755 index 00000000..621be2bc --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/initialize.sh @@ -0,0 +1,79 @@ +#!/bin/bash + +# read .env file, but prefer explicitly set environment variables +IFS=$'\n' +for l in $(cat .env); do + IFS='=' read -ra VARVAL <<< "$l" + # If variable with such name already exists, preserves its value + eval "export ${VARVAL[0]}=\${${VARVAL[0]}:-${VARVAL[1]}}" +done +unset IFS + +echo Network +echo " RPC: $SOROBAN_RPC_URL" +echo " Passphrase: \"$SOROBAN_NETWORK_PASSPHRASE\"" + +NETWORK_STATUS=$(curl -s -X POST "http://localhost:8000/soroban/rpc" -H "Content-Type: application/json" -d '{ "jsonrpc": "2.0", "id": 8675309, "method": "getHealth" }' | sed 's/.*"status":"\(.*\)".*/\1/') || { echo "Make sure you're running local RPC network on localhost:8000" && exit 1; } +echo " Status: $NETWORK_STATUS" + +if [[ "$NETWORK_STATUS" != "healthy" ]]; then + echo "Network is not healthy (not running?), exiting" + exit 1 +fi + +# Print command before executing, from https://stackoverflow.com/a/23342259/249801 +# Discussion: https://github.com/stellar/soroban-tools/pull/1034#pullrequestreview-1690667116 +exe() { echo"${@/eval/}" ; "$@" ; } + +function fund_all() { + exe eval "./soroban keys generate root" + exe eval "./soroban keys fund root" + exe eval "./soroban keys generate alice" + exe eval "./soroban keys fund alice" + exe eval "./soroban keys generate bob" + exe eval "./soroban keys fund bob" +} +function upload() { + exe eval "(./soroban contract $1 --source root --wasm $2 --ignore-checks) > $3" +} +function deploy() { + exe eval "(./soroban contract deploy --source root --wasm-hash $(cat $1) --ignore-checks) > $2" +} +function deploy_all() { + upload deploy ../../../../target/wasm32-unknown-unknown/test-wasms/test_custom_types.wasm contract-id-custom-types.txt + upload deploy ../../../../target/wasm32-unknown-unknown/test-wasms/test_hello_world.wasm contract-id-hello-world.txt + upload deploy ../../../../target/wasm32-unknown-unknown/test-wasms/test_swap.wasm 
contract-id-swap.txt + upload install ../../../../target/wasm32-unknown-unknown/test-wasms/test_token.wasm contract-token-hash.txt + deploy contract-token-hash.txt contract-id-token-a.txt + deploy contract-token-hash.txt contract-id-token-b.txt +} +function initialize() { + exe eval "./soroban contract invoke --source root --id $(cat $1) -- initialize --admin $(./soroban keys address root) --decimal 0 --name 'Token $2' --symbol '$2'" +} +function initialize_all() { + initialize contract-id-token-a.txt A + initialize contract-id-token-b.txt B +} +function bind() { + exe eval "./soroban contract bindings typescript --contract-id $(cat $1) --output-dir ./node_modules/$2 --overwrite" +} +function bind_all() { + bind contract-id-custom-types.txt test-custom-types + bind contract-id-hello-world.txt test-hello-world + bind contract-id-swap.txt test-swap + bind contract-id-token-a.txt token +} + +function mint() { + exe eval "./soroban contract invoke --source root --id $(cat $1) -- mint --amount 2000000 --to $(./soroban keys address $2)" +} +function mint_all() { + mint contract-id-token-a.txt alice + mint contract-id-token-b.txt bob +} + +fund_all +deploy_all +initialize_all +mint_all +bind_all diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/package-lock.json b/cmd/crates/soroban-spec-typescript/ts-tests/package-lock.json new file mode 100644 index 00000000..36f5cdd6 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/package-lock.json @@ -0,0 +1,3405 @@ +{ + "name": "ts-tests", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "hasInstallScript": true, + "devDependencies": { + "@ava/typescript": "^4.1.0", + "@stellar/stellar-sdk": "11.2.0", + "@types/node": "^20.4.9", + "@typescript-eslint/eslint-plugin": "^6.10.0", + "@typescript-eslint/parser": "^6.10.0", + "ava": "^5.3.1", + "dotenv": "^16.3.1", + "eslint": "^8.53.0", + "typescript": "^5.3.3" + } + }, + "node_modules/@aashutoshrathi/word-wrap": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", + "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@ava/typescript": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@ava/typescript/-/typescript-4.1.0.tgz", + "integrity": "sha512-1iWZQ/nr9iflhLK9VN8H+1oDZqe93qxNnyYUz+jTzkYPAHc5fdZXBrqmNIgIfFhWYXK5OaQ5YtC7OmLeTNhVEg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^5.0.0", + "execa": "^7.1.1" + }, + "engines": { + "node": "^14.19 || ^16.15 || ^18 || ^20" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", + "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + 
"node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/js": { + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.55.0.tgz", + "integrity": "sha512-qQfo2mxH5yVom1kacMtZZJFVdW+E70mqHMJvVg6WTLo+VBuQJ4TojZlfWBjK0ve5BdEeNAVxOsl/nvNMpJOaJA==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.13", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.13.tgz", + "integrity": "sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.1.tgz", + "integrity": "sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==", + "dev": true + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@stellar/js-xdr": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@stellar/js-xdr/-/js-xdr-3.0.1.tgz", + "integrity": "sha512-dp5Eh7Nr1YjiIeqpdkj2cQYxfoPudDAH3ck8MWggp48Htw66Z/hUssNYUQG/OftLjEmHT90Z/dtey2Y77DOxIw==", + "dev": true + }, + 
"node_modules/@stellar/stellar-base": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@stellar/stellar-base/-/stellar-base-10.0.1.tgz", + "integrity": "sha512-BDbx7VHOEQh+4J3Q+gStNXgPaNckVFmD4aOlBBGwxlF6vPFmVnW8IoJdkX7T58zpX55eWI6DXvEhDBlrqTlhAQ==", + "dev": true, + "dependencies": { + "@stellar/js-xdr": "^3.0.1", + "base32.js": "^0.1.0", + "bignumber.js": "^9.1.2", + "buffer": "^6.0.3", + "sha.js": "^2.3.6", + "tweetnacl": "^1.0.3" + }, + "optionalDependencies": { + "sodium-native": "^4.0.1" + } + }, + "node_modules/@stellar/stellar-sdk": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/@stellar/stellar-sdk/-/stellar-sdk-11.2.0.tgz", + "integrity": "sha512-qInRR+mLLl9O/AI6Q+Sr19RZeYJtlNoJQJi3pch5BYoMvVhjO8IU8AhHADP//Zmc2osyogwPuqXBiFdaGlfHWA==", + "dev": true, + "dependencies": { + "@stellar/stellar-base": "10.0.1", + "axios": "^1.6.5", + "bignumber.js": "^9.1.2", + "eventsource": "^2.0.2", + "randombytes": "^2.1.0", + "toml": "^3.0.0", + "urijs": "^1.19.1" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true + }, + "node_modules/@types/node": { + "version": "20.10.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.4.tgz", + "integrity": "sha512-D08YG6rr8X90YB56tSIuBaddy/UXAA9RKJoFvrsnogAum/0pmjkgi4+2nx96A330FmioegBWmEYQ+syqCFaveg==", + "dev": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/semver": { + "version": "7.5.6", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz", + "integrity": "sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.14.0.tgz", + "integrity": "sha512-1ZJBykBCXaSHG94vMMKmiHoL0MhNHKSVlcHVYZNw+BKxufhqQVTOawNpwwI1P5nIFZ/4jLVop0mcY6mJJDFNaw==", + "dev": true, + "dependencies": { + "@eslint-community/regexpp": "^4.5.1", + "@typescript-eslint/scope-manager": "6.14.0", + "@typescript-eslint/type-utils": "6.14.0", + "@typescript-eslint/utils": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", + "debug": "^4.3.4", + "graphemer": "^1.4.0", + "ignore": "^5.2.4", + "natural-compare": "^1.4.0", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.14.0.tgz", + "integrity": "sha512-QjToC14CKacd4Pa7JK4GeB/vHmWFJckec49FR4hmIRf97+KXole0T97xxu9IFiPxVQ1DBWrQ5wreLwAGwWAVQA==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "6.14.0", + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/typescript-estree": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", 
+ "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.14.0.tgz", + "integrity": "sha512-VT7CFWHbZipPncAZtuALr9y3EuzY1b1t1AEkIq2bTXUPKw+pHoXflGNG5L+Gv6nKul1cz1VH8fz16IThIU0tdg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.14.0.tgz", + "integrity": "sha512-x6OC9Q7HfYKqjnuNu5a7kffIYs3No30isapRBJl1iCHLitD8O0lFbRcVGiOcuyN837fqXzPZ1NS10maQzZMKqw==", + "dev": true, + "dependencies": { + "@typescript-eslint/typescript-estree": "6.14.0", + "@typescript-eslint/utils": "6.14.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/types": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.14.0.tgz", + "integrity": "sha512-uty9H2K4Xs8E47z3SnXEPRNDfsis8JO27amp2GNCnzGETEW3yTqEIVg5+AI7U276oGF/tw6ZA+UesxeQ104ceA==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.14.0.tgz", + "integrity": "sha512-yPkaLwK0yH2mZKFE/bXkPAkkFgOv15GJAUzgUVonAbv0Hr4PK/N2yaA/4XQbTZQdygiDkpt5DkxPELqHguNvyw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.14.0.tgz", + "integrity": "sha512-XwRTnbvRr7Ey9a1NT6jqdKX8y/atWG+8fAIu3z73HSP8h06i3r/ClMhmaF/RGWGW1tHJEwij1uEg2GbEmPYvYg==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "@types/json-schema": "^7.0.12", + "@types/semver": "^7.5.0", + "@typescript-eslint/scope-manager": "6.14.0", + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/typescript-estree": "6.14.0", + "semver": "^7.5.4" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || 
^8.0.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.14.0.tgz", + "integrity": "sha512-fB5cw6GRhJUz03MrROVuj5Zm/Q+XWlVdIsFj+Zb1Hvqouc8t+XP2H5y53QYU/MGtd2dPg6/vJJlhoX3xc2ehfw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.14.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, + "node_modules/acorn": { + "version": "8.11.2", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.2.tgz", + "integrity": "sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.1.tgz", + "integrity": "sha512-TgUZgYvqZprrl7YldZNoa9OciCAyZR+Ejm9eXzKCmjsF5IKp/wgQ7Z/ZpjpGTIUPwrHQIcYeI8qDh4PsEwxMbw==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/aggregate-error": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-4.0.1.tgz", + "integrity": "sha512-0poP0T7el6Vq3rstR8Mn4V/IQrpBLO6POkUSrN7RhyY+GF/InCFShQzsQ39T25gkHhLgSLByyAz+Kjb+c2L98w==", + "dev": true, + "dependencies": { + "clean-stack": "^4.0.0", + "indent-string": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + 
} + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/array-find-index": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", + "integrity": "sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/arrgv": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/arrgv/-/arrgv-1.0.2.tgz", + "integrity": "sha512-a4eg4yhp7mmruZDQFqVMlxNRFGi/i1r87pt8SDHy0/I8PqSXoUTlWZRdAZo0VXgvEARcujbtTk8kiZRi1uDGRw==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/arrify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-3.0.0.tgz", + "integrity": "sha512-tLkvA81vQG/XqE2mjDkGQHoOINtMHtysSnemrmoGe6PydDPMRbVugqyk4A6V/WDWEfm3l+0d8anA9r8cv/5Jaw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "dev": true + }, + "node_modules/ava": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ava/-/ava-5.3.1.tgz", + "integrity": "sha512-Scv9a4gMOXB6+ni4toLuhAm9KYWEjsgBglJl+kMGI5+IVDt120CCDZyB5HNU9DjmLI2t4I0GbnxGLmmRfGTJGg==", + "dev": true, + "dependencies": { + "acorn": "^8.8.2", + "acorn-walk": "^8.2.0", + "ansi-styles": "^6.2.1", + "arrgv": "^1.0.2", + "arrify": "^3.0.0", + "callsites": "^4.0.0", + "cbor": "^8.1.0", + "chalk": "^5.2.0", + "chokidar": "^3.5.3", + "chunkd": "^2.0.1", + "ci-info": "^3.8.0", + "ci-parallel-vars": "^1.0.1", + "clean-yaml-object": "^0.1.0", + "cli-truncate": "^3.1.0", + "code-excerpt": "^4.0.0", + "common-path-prefix": "^3.0.0", + "concordance": "^5.0.4", + "currently-unhandled": "^0.4.1", + "debug": "^4.3.4", + "emittery": "^1.0.1", + "figures": "^5.0.0", + "globby": "^13.1.4", + "ignore-by-default": "^2.1.0", + "indent-string": "^5.0.0", + "is-error": "^2.2.2", + "is-plain-object": "^5.0.0", + "is-promise": "^4.0.0", + "matcher": "^5.0.0", + "mem": "^9.0.2", + "ms": "^2.1.3", + "p-event": "^5.0.1", + "p-map": "^5.5.0", + "picomatch": "^2.3.1", + "pkg-conf": "^4.0.0", + "plur": "^5.1.0", + "pretty-ms": "^8.0.0", + "resolve-cwd": "^3.0.0", + "stack-utils": "^2.0.6", + "strip-ansi": "^7.0.1", + "supertap": "^3.0.1", + "temp-dir": "^3.0.0", + "write-file-atomic": "^5.0.1", + "yargs": "^17.7.2" + }, + "bin": { + "ava": 
"entrypoints/cli.mjs" + }, + "engines": { + "node": ">=14.19 <15 || >=16.15 <17 || >=18" + }, + "peerDependencies": { + "@ava/typescript": "*" + }, + "peerDependenciesMeta": { + "@ava/typescript": { + "optional": true + } + } + }, + "node_modules/ava/node_modules/globby": { + "version": "13.2.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-13.2.2.tgz", + "integrity": "sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==", + "dev": true, + "dependencies": { + "dir-glob": "^3.0.1", + "fast-glob": "^3.3.0", + "ignore": "^5.2.4", + "merge2": "^1.4.1", + "slash": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ava/node_modules/slash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/axios": { + "version": "1.6.5", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.5.tgz", + "integrity": "sha512-Ii012v05KEVuUoFWmMW/UQv9aRIc3ZwkWDcM+h5Il8izZCtRVpDUfwpoFf7eOtajT3QiGR4yDUx7lPqHJULgbg==", + "dev": true, + "dependencies": { + "follow-redirects": "^1.15.4", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/base32.js": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/base32.js/-/base32.js-0.1.0.tgz", + "integrity": "sha512-n3TkB02ixgBOhTvANakDb4xaMXnYUVkNoRFJjQflcqMQhyEKxEHdj3E6N8t8sUQ0mjH/3/JxzlXuz3ul/J90pQ==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bignumber.js": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.1.2.tgz", + "integrity": "sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/blueimp-md5": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/blueimp-md5/-/blueimp-md5-2.19.0.tgz", + "integrity": "sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + 
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/callsites": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-4.1.0.tgz", + "integrity": "sha512-aBMbD1Xxay75ViYezwT40aQONfr+pSXTHwNKvIXhXD6+LY3F1dLIcceoC5OZKBVHbXcysz1hL9D2w0JJIMXpUw==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cbor": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/cbor/-/cbor-8.1.0.tgz", + "integrity": "sha512-DwGjNW9omn6EwP70aXsn7FQJx5kO12tX0bZkaTjzdVFM6/7nhA4t0EENocKGx6D2Bch9PE2KzCUf5SceBdeijg==", + "dev": true, + "dependencies": { + "nofilter": "^3.1.0" + }, + "engines": { + "node": ">=12.19" + } + }, + "node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chunkd": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/chunkd/-/chunkd-2.0.1.tgz", + "integrity": "sha512-7d58XsFmOq0j6el67Ug9mHf9ELUXsQXYJBkyxhH/k+6Ke0qXRnv0kbemx+Twc6fRJ07C49lcbdgm9FL1Ei/6SQ==", + "dev": true + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + 
"engines": { + "node": ">=8" + } + }, + "node_modules/ci-parallel-vars": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ci-parallel-vars/-/ci-parallel-vars-1.0.1.tgz", + "integrity": "sha512-uvzpYrpmidaoxvIQHM+rKSrigjOe9feHYbw4uOI2gdfe1C3xIlxO+kVXq83WQWNniTf8bAxVpy+cQeFQsMERKg==", + "dev": true + }, + "node_modules/clean-stack": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-4.2.0.tgz", + "integrity": "sha512-LYv6XPxoyODi36Dp976riBtSY27VmFo+MKqEU9QCCWyTrdEPDog+RWA7xQWHi6Vbp61j5c4cdzzX1NidnwtUWg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/clean-yaml-object": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/clean-yaml-object/-/clean-yaml-object-0.1.0.tgz", + "integrity": "sha512-3yONmlN9CSAkzNwnRCiJQ7Q2xK5mWuEfL3PuTZcAUzhObbXsfsnMptJzXwz93nc5zn9V9TwCVMmV7w4xsm43dw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/cli-truncate": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-3.1.0.tgz", + "integrity": "sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==", + "dev": true, + "dependencies": { + "slice-ansi": "^5.0.0", + "string-width": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/cliui/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/code-excerpt": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/code-excerpt/-/code-excerpt-4.0.0.tgz", + "integrity": "sha512-xxodCmBen3iy2i0WtAK8FlFNrRzjUqjRsMfho58xT/wvZU1YTM3fCnRjcy1gJPMepaRlgm/0e6w8SpWHpn3/cA==", + "dev": true, + "dependencies": { + "convert-to-spaces": "^2.0.1" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dev": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/common-path-prefix": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/common-path-prefix/-/common-path-prefix-3.0.0.tgz", + "integrity": "sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/concordance": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/concordance/-/concordance-5.0.4.tgz", + "integrity": "sha512-OAcsnTEYu1ARJqWVGwf4zh4JDfHZEaSNlNccFmt8YjB2l/n19/PF2viLINHc57vO4FKIAFl2FWASIGZZWZ2Kxw==", + "dev": true, + "dependencies": { + "date-time": "^3.1.0", + "esutils": "^2.0.3", + "fast-diff": "^1.2.0", + "js-string-escape": "^1.0.1", + "lodash": "^4.17.15", + "md5-hex": "^3.0.1", + "semver": "^7.3.2", + "well-known-symbols": "^2.0.0" + }, + "engines": { + "node": ">=10.18.0 <11 || >=12.14.0 <13 || >=14" + } + }, + "node_modules/convert-to-spaces": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/convert-to-spaces/-/convert-to-spaces-2.0.1.tgz", + "integrity": "sha512-rcQ1bsQO9799wq24uE5AM2tAILy4gXGIK/njFWcVQkGNZ96edlpY+A7bjwvzjYvLDyzmG1MmMLZhpcsb+klNMQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/currently-unhandled": { + "version": "0.4.1", + "resolved": 
"https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz", + "integrity": "sha512-/fITjgjGU50vjQ4FH6eUoYu+iUoUKIXws2hL15JJpIR+BbTxaXQsMuuyjtNh2WqsSBS5nsaZHFsFecyw5CCAng==", + "dev": true, + "dependencies": { + "array-find-index": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/date-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/date-time/-/date-time-3.1.0.tgz", + "integrity": "sha512-uqCUKXE5q1PNBXjPqvwhwJf9SwMoAHBgWJ6DcrnS5o+W2JOiIILl0JEdVD8SGujrNS02GGxgwAg2PN2zONgtjg==", + "dev": true, + "dependencies": { + "time-zone": "^1.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dotenv": { + "version": "16.3.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz", + "integrity": "sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/motdotla/dotenv?sponsor=1" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, + "node_modules/emittery": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-1.0.1.tgz", + "integrity": "sha512-2ID6FdrMD9KDLldGesP6317G78K7km/kMcwItRtVFva7I/cSEOIaLpewaUb+YLXVwdAp3Ctfxh/V5zIl1sj7dQ==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": 
"https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.55.0.tgz", + "integrity": "sha512-iyUUAM0PCKj5QpwGfmCAG9XXbZCWsqP/eWAWrG/W0umvjuLRBECwSFdt+rCntju0xEH7teIABPwXpahftIaTdA==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.55.0", + "@humanwhocodes/config-array": "^0.11.13", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/ansi-regex": { + "version": 
"5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/eslint/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/eslint/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/eslint/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/eslint/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + 
"node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eventsource": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz", + "integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==", + "dev": true, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/execa": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", + "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.1", + "human-signals": "^4.3.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^3.0.7", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": "^14.18.0 || ^16.14.0 || >=18.0.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-diff": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz", + "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fastq": { + "version": 
"1.15.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", + "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/figures": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-5.0.0.tgz", + "integrity": "sha512-ej8ksPF4x6e5wvK9yevct0UCXh8TTFlWGVLlgjZuoBH1HwjIfKE/IdL5mq89sFA7zELi1VhKpmtDnrs7zWyeyg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^5.0.0", + "is-unicode-supported": "^1.2.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.2.9", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz", + "integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==", + "dev": true + }, + "node_modules/follow-redirects": { + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dev": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/human-signals": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", + "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "dev": true, + "engines": { + "node": ">=14.18.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/ignore": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.0.tgz", + "integrity": "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/ignore-by-default": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-2.1.0.tgz", + "integrity": "sha512-yiWd4GVmJp0Q6ghmM2B/V3oZGRmjrKLXvHR3TE1nfoXsmoggllfZUQe74EN0fJdPFZu2NIvNdrMMLm3OsV7Ohw==", + "dev": true, + "engines": { + "node": ">=10 <11 || >=12 <13 || >=14" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-5.0.0.tgz", + "integrity": "sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/irregular-plurals": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/irregular-plurals/-/irregular-plurals-3.5.0.tgz", + "integrity": 
"sha512-1ANGLZ+Nkv1ptFb2pa8oG8Lem4krflKuX/gINiHJHjJUKaJHk/SXk5x6K3J+39/p0h1RQ2saROclJJ+QLvETCQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-error": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/is-error/-/is-error-2.2.2.tgz", + "integrity": "sha512-IOQqts/aHWbiisY5DuPJQ0gcbvaLFCa7fBa9xoLfxBZvQ+ZI/Zh9xoI7Gk+G64N0FdK4AbibytHht2tWgpJWLg==", + "dev": true + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-4.0.0.tgz", + "integrity": "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-promise": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "dev": true + }, + "node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-unicode-supported": { + "version": "1.3.0", + "resolved": 
"https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", + "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/js-string-escape": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz", + "integrity": "sha512-Smw4xcfIQ5LVjAOuJCvN/zIodzA/BBSsluuoSykP+lUvScIi4U6RJLfwHet5cxFnCswUjISV8oAXaqaJDY3chg==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/load-json-file": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-7.0.1.tgz", + "integrity": "sha512-Gnxj3ev3mB5TkVBGad0JM6dmLiQL+o0t23JPBZ9sd+yvSLk05mFoqKBw5N8gbbkU4TNXyqCgIrl/VM17OgUIgQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + 
"node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/map-age-cleaner": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz", + "integrity": "sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==", + "dev": true, + "dependencies": { + "p-defer": "^1.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/matcher": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/matcher/-/matcher-5.0.0.tgz", + "integrity": "sha512-s2EMBOWtXFc8dgqvoAzKJXxNHibcdJMV0gwqKUaw9E2JBJuGUK7DrNKrA6g/i+v72TT16+6sVm5mS3thaMLQUw==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/md5-hex": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/md5-hex/-/md5-hex-3.0.1.tgz", + "integrity": "sha512-BUiRtTtV39LIJwinWBjqVsU9xhdnz7/i889V859IBFpuqGAj6LuOvHv5XLbgZ2R7ptJoJaEcxkv88/h25T7Ciw==", + "dev": true, + "dependencies": { + "blueimp-md5": "^2.10.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mem": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/mem/-/mem-9.0.2.tgz", + "integrity": "sha512-F2t4YIv9XQUBHt6AOJ0y7lSmP1+cY7Fm1DRh9GClTGzKST7UWLMx6ly9WZdLH/G/ppM5RL4MlQfRT71ri9t19A==", + "dev": true, + "dependencies": { + "map-age-cleaner": "^0.1.3", + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sindresorhus/mem?sponsor=1" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + 
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/node-gyp-build": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.7.1.tgz", + "integrity": "sha512-wTSrZ+8lsRRa3I3H8Xr65dLWSgCvY2l4AOnaeKdPA9TB/WYMPaTcrzf3rXvFoVvjKNVnu0CcWSx54qq9GKRUYg==", + "dev": true, + "optional": true, + "bin": { + "node-gyp-build": "bin.js", + "node-gyp-build-optional": "optional.js", + "node-gyp-build-test": "build-test.js" + } + }, + "node_modules/nofilter": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/nofilter/-/nofilter-3.1.0.tgz", + "integrity": "sha512-l2NNj07e9afPnhAhvgVrCD/oy2Ai1yfLpuo3EpiO1jFTsB4sFz6oIfAfSZyQzVpkZQ9xS8ZS5g1jCBgq4Hwo0g==", + "dev": true, + "engines": { + "node": ">=12.19" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.1.0.tgz", + "integrity": "sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==", + "dev": true, + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": 
"sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "dev": true, + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/optionator": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "dev": true, + "dependencies": { + "@aashutoshrathi/word-wrap": "^1.2.3", + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-defer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-1.0.0.tgz", + "integrity": "sha512-wB3wfAxZpk2AzOfUMJNL+d36xothRSyj8EXOa4f6GMqYDN9BJaaSISbsk+wS9abmnebVw95C2Kb5t85UmpCxuw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/p-event": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-5.0.1.tgz", + "integrity": "sha512-dd589iCQ7m1L0bmC5NLlVYfy3TbBEsMUfWx9PyAgPeIcFZ/E2yaTZ4Rz4MiBmmJShviiftHVXOqfnfzJ6kyMrQ==", + "dev": true, + "dependencies": { + "p-timeout": "^5.0.2" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-map": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-5.5.0.tgz", + "integrity": "sha512-VFqfGDHlx87K66yZrNdI4YGtD70IRyd+zSvgks6mzHPRNkoKy+9EKP4SFC77/vTTQYmRmti7dvqC+m5jBrBAcg==", + "dev": true, + "dependencies": { + "aggregate-error": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-timeout": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-5.1.0.tgz", + "integrity": 
"sha512-auFDyzzzGZZZdHz3BtET9VEz0SE/uMEAx7uWfGPucfzEwwe/xH0iVeZibQmANYE/hp9T2+UUZT5m+BKyrDp3Ew==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parent-module/node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-ms": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-3.0.0.tgz", + "integrity": "sha512-Tpb8Z7r7XbbtBTrM9UhpkzzaMrqA2VXMT3YChzYltwV3P3pM6t8wl7TvpMnSTosz1aQAdVib7kdoys7vYOPerw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pkg-conf": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-4.0.0.tgz", + "integrity": "sha512-7dmgi4UY4qk+4mj5Cd8v/GExPo0K+SlY+hulOSdfZ/T6jVH6//y7NtzZo5WrfhDBxuQ0jCa7fLZmNaNh7EWL/w==", + "dev": true, + "dependencies": { + "find-up": "^6.0.0", + "load-json-file": "^7.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-conf/node_modules/find-up": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-6.3.0.tgz", + "integrity": 
"sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==", + "dev": true, + "dependencies": { + "locate-path": "^7.1.0", + "path-exists": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-conf/node_modules/locate-path": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", + "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", + "dev": true, + "dependencies": { + "p-locate": "^6.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-conf/node_modules/p-limit": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz", + "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^1.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-conf/node_modules/p-locate": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz", + "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==", + "dev": true, + "dependencies": { + "p-limit": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-conf/node_modules/path-exists": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz", + "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/pkg-conf/node_modules/yocto-queue": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", + "dev": true, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/plur": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/plur/-/plur-5.1.0.tgz", + "integrity": "sha512-VP/72JeXqak2KiOzjgKtQen5y3IZHn+9GOuLDafPv0eXa47xq0At93XahYBs26MsifCQ4enGKwbjBTKgb9QJXg==", + "dev": true, + "dependencies": { + "irregular-plurals": "^3.3.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/pretty-ms": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-8.0.0.tgz", + "integrity": "sha512-ASJqOugUF1bbzI35STMBUpZqdfYKlJugy6JBziGi2EE+AL5JPJGSzvpeVXojxrr0ViUYoToUjb5kjSEGf7Y83Q==", + "dev": true, + "dependencies": { + "parse-ms": "^3.0.0" + }, + "engines": 
{ + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "dev": true + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-cwd/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/serialize-error": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/serialize-error/-/serialize-error-7.0.1.tgz", + "integrity": "sha512-8I8TjW5KMOKsZQTvoxjuSIa7foAwPWGOts+6o7sgjz41/qMD9VQHEDxi6PBvK2l0MXUmqZyNpUK+T2tQaaElvw==", + "dev": true, + "dependencies": { + "type-fest": "^0.13.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/serialize-error/node_modules/type-fest": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.13.1.tgz", + "integrity": "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dev": true, + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "bin": { + "sha.js": "bin.js" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": 
"sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-5.0.0.tgz", + "integrity": "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.0.0", + "is-fullwidth-code-point": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/sodium-native": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/sodium-native/-/sodium-native-4.0.4.tgz", + "integrity": "sha512-faqOKw4WQKK7r/ybn6Lqo1F9+L5T6NlBJJYvpxbZPetpWylUVqz449mvlwIBKBqxEHbWakWuOlUt8J3Qpc4sWw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "dependencies": { + "node-gyp-build": "^4.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supertap": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/supertap/-/supertap-3.0.1.tgz", + "integrity": "sha512-u1ZpIBCawJnO+0QePsEiOknOfCRq0yERxiAchT0i4li0WHNUJbf0evXXSXOcCAR4M8iMDoajXYmstm/qO81Isw==", + "dev": true, + "dependencies": { + "indent-string": "^5.0.0", + "js-yaml": "^3.14.1", + "serialize-error": "^7.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/supertap/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/supertap/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/temp-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-3.0.0.tgz", + "integrity": "sha512-nHc6S/bwIilKHNRgK/3jlhDoIHcp45YgyiwcAk46Tr0LfEqGBVpmiAyuiuxeVE44m3mXnEeVhaipLOEWmH+Njw==", + "dev": true, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "node_modules/time-zone": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/time-zone/-/time-zone-1.0.0.tgz", + "integrity": "sha512-TIsDdtKo6+XrPtiTm1ssmMngN1sAhyKnTO2kunQWqNPWIVvCm15Wmw4SWInwTVgJ5u/Tr04+8Ei9TNcw4x4ONA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toml": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", + "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==", + "dev": true + }, + "node_modules/ts-api-utils": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.0.3.tgz", + "integrity": "sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg==", + "dev": true, + "engines": { + "node": ">=16.13.0" + }, + "peerDependencies": { + "typescript": ">=4.2.0" + } + }, + "node_modules/tweetnacl": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz", + "integrity": "sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==", + "dev": true + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", + "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/urijs": { + "version": "1.19.11", + "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.11.tgz", + "integrity": "sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==", + "dev": true + }, + "node_modules/well-known-symbols": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/well-known-symbols/-/well-known-symbols-2.0.0.tgz", + "integrity": "sha512-ZMjC3ho+KXo0BfJb7JgtQ5IBuvnShdlACNkKkdsqBmYw3bPAaJfPeYUo6tLUaT5tG/Gkh7xkpBhKRQ9e7pyg9Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": 
"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/write-file-atomic": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz", + "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/write-file-atomic/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": 
"sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/yargs/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": 
"sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/package.json b/cmd/crates/soroban-spec-typescript/ts-tests/package.json new file mode 100644 index 00000000..c06446de --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/package.json @@ -0,0 +1,31 @@ +{ + "private": true, + "type": "module", + "scripts": { + "lint": "eslint src/*", + "postinstall": "./initialize.sh", + "test": "npm run lint && ava" + }, + "devDependencies": { + "@ava/typescript": "^4.1.0", + "@types/node": "^20.4.9", + "@typescript-eslint/eslint-plugin": "^6.10.0", + "@typescript-eslint/parser": "^6.10.0", + "ava": "^5.3.1", + "dotenv": "^16.3.1", + "eslint": "^8.53.0", + "@stellar/stellar-sdk": "11.2.0", + "typescript": "^5.3.3" + }, + "ava": { + "typescript": { + "rewritePaths": { + "src/": "build/" + }, + "compile": "tsc" + }, + "require": [ + "dotenv/config" + ] + } +} diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/soroban b/cmd/crates/soroban-spec-typescript/ts-tests/soroban new file mode 100755 index 00000000..d98f247c --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/soroban @@ -0,0 +1,3 @@ +#!/bin/bash + +cargo run --quiet -p soroban-cli -- "$@" diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/src/test-custom-types.ts b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-custom-types.ts new file mode 100644 index 00000000..3b07dc3d --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-custom-types.ts @@ -0,0 +1,187 @@ +import test from 'ava' +import { root, rpcUrl, wallet } from './util.js' +import { Contract, Ok, Err, networks } from 'test-custom-types' + +const publicKey = root.keypair.publicKey(); + +const contract = new Contract({ ...networks.standalone, rpcUrl, wallet }); + +test('hello', async t => { + const { result } = await contract.hello({ hello: 'tests' }) + t.is(result, 'tests') +}) + +test('woid', async t => { + t.is((await contract.woid()).result, undefined) +}) + +test('u32_fail_on_even', async t => { + t.deepEqual( + (await contract.u32FailOnEven({ u32_: 1 })).result, + new Ok(1) + ) + t.deepEqual( + (await contract.u32FailOnEven({ u32_: 0 })).result, + new Err({ message: "Please provide an odd number" }) + ) +}) + +test('u32', async t => { + t.is((await contract.u32({ u32_: 1 })).result, 1) +}) + +test('i32', async t => { + t.is((await contract.i32({ i32_: 1 })).result, 1) +}) + +test('i64', async t => { + t.is((await contract.i64({ i64_: 1n })).result, 1n) +}) + +test("strukt_hel", async (t) => { + const test = { a: 0, b: true, c: "world" } + t.deepEqual((await contract.struktHel({ strukt: test })).result, ["Hello", "world"]) +}) + +test("strukt", async (t) => { + const test = { a: 0, b: true, c: "hello" } + t.deepEqual((await contract.strukt({ strukt: test })).result, test) +}) + +test('simple first', async t => { + const arg = { tag: 'First', values: undefined } as const + const ret = { tag: 'First' } + t.deepEqual((await contract.simple({ simple: arg })).result, ret) +}) + +test('simple second', async t => { + const arg = { tag: 'Second', values: undefined } as const + const ret = { tag: 'Second' } + t.deepEqual((await contract.simple({ simple: arg })).result, ret) +}) + +test('simple third', async t => { + const arg = { tag: 'Third', values: undefined } as const + const ret = { tag: 'Third' } + 
t.deepEqual((await contract.simple({ simple: arg })).result, ret) +}) + +test('complex with struct', async t => { + const arg = { tag: 'Struct', values: [{ a: 0, b: true, c: 'hello' }] } as const + const ret = { tag: 'Struct', values: [{ a: 0, b: true, c: 'hello' }] } + t.deepEqual((await contract.complex({ complex: arg })).result, ret) +}) + +test('complex with tuple', async t => { + const arg = { tag: 'Tuple', values: [[{ a: 0, b: true, c: 'hello' }, { tag: 'First', values: undefined }]] } as const + const ret = { tag: 'Tuple', values: [[{ a: 0, b: true, c: 'hello' }, { tag: 'First' }]] } + t.deepEqual((await contract.complex({ complex: arg })).result, ret) +}) + +test('complex with enum', async t => { + const arg = { tag: 'Enum', values: [{ tag: 'First', values: undefined }] } as const + const ret = { tag: 'Enum', values: [{ tag: 'First' }] } + t.deepEqual((await contract.complex({ complex: arg })).result, ret) +}) + +test('complex with asset', async t => { + const arg = { tag: 'Asset', values: [publicKey, 1n] } as const + const ret = { tag: 'Asset', values: [publicKey, 1n] } + t.deepEqual((await contract.complex({ complex: arg })).result, ret) +}) + +test('complex with void', async t => { + const arg = { tag: 'Void', values: undefined } as const + const ret = { tag: 'Void' } + t.deepEqual((await contract.complex({ complex: arg })).result, ret) +}) + +test('addresse', async t => { + t.deepEqual((await contract.addresse({ addresse: publicKey })).result, publicKey) +}) + +test('bytes', async t => { + const bytes = Buffer.from('hello') + t.deepEqual((await contract.bytes({ bytes })).result, bytes) +}) + +test('bytes_n', async t => { + const bytes_n = Buffer.from('123456789') // what's the correct way to construct bytes_n? + t.deepEqual((await contract.bytesN({ bytes_n })).result, bytes_n) +}) + +test('card', async t => { + const card = 11 + t.is((await contract.card({ card })).result, card) +}) + +test('boolean', async t => { + t.is((await contract.boolean({ boolean: true })).result, true) +}) + +test('not', async t => { + t.is((await contract.not({ boolean: true })).result, false) +}) + +test('i128', async t => { + t.is((await contract.i128({ i128: -1n })).result, -1n) +}) + +test('u128', async t => { + t.is((await contract.u128({ u128: 1n })).result, 1n) +}) + +test('multi_args', async t => { + t.is((await contract.multiArgs({ a: 1, b: true })).result, 1) + t.is((await contract.multiArgs({ a: 1, b: false })).result, 0) +}) + +test('map', async t => { + const map = new Map() + map.set(1, true) + map.set(2, false) + // map.set(3, 'hahaha') // should throw an error + const ret = Array.from(map.entries()) + t.deepEqual((await contract.map({ map })).result, ret) +}) + +test('vec', async t => { + const vec = [1, 2, 3] + t.deepEqual((await contract.vec({ vec })).result, vec) +}) + +test('tuple', async t => { + const tuple = ['hello', 1] as const + t.deepEqual((await contract.tuple({ tuple })).result, tuple) +}) + +test('option', async t => { + // this makes sense + t.deepEqual((await contract.option({ option: 1 })).result, 1) + + // this passes but shouldn't + t.deepEqual((await contract.option({ option: undefined })).result, undefined) + + // this is the behavior we probably want, but fails + // t.deepEqual(await contract.option(), undefined) // typing and implementation require the object + // t.deepEqual((await contract.option({})).result, undefined) // typing requires argument; implementation would be fine with this + // t.deepEqual((await contract.option({ option: undefined })).result, 
undefined) +}) + +test('u256', async t => { + t.is((await contract.u256({ u256: 1n })).result, 1n) +}) + +test('i256', async t => { + t.is((await contract.i256({ i256: -1n })).result, -1n) +}) + +test('string', async t => { + t.is((await contract.string({ string: 'hello' })).result, 'hello') +}) + +test('tuple_strukt', async t => { + const arg = [{ a: 0, b: true, c: 'hello' }, { tag: 'First', values: undefined }] as const + const res = [{ a: 0, b: true, c: 'hello' }, { tag: 'First' }] + t.deepEqual((await contract.tupleStrukt({ tuple_strukt: arg })).result, res) +}) diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/src/test-deserialized-transaction.ts b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-deserialized-transaction.ts new file mode 100644 index 00000000..f6152d2c --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-deserialized-transaction.ts @@ -0,0 +1,16 @@ +import test from "ava" +import { wallet, rpcUrl } from "./util.js" +import { Contract, networks } from "test-hello-world" + +const contract = new Contract({ ...networks.standalone, rpcUrl, wallet }) + +test("has correctly-typed result", async (t) => { + const initial = await contract.hello({ world: "tests" }) + t.is(initial.result[0], "Hello") + t.is(initial.result[1], "tests") + + const serialized = initial.toJSON() + const deserialized = contract.fromJSON.hello(serialized) + t.is(deserialized.result[0], "Hello") // throws TS error if `result` is of type `unknown` + t.is(deserialized.result[1], "tests") // throws TS error if `result` is of type `unknown` +}); diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/src/test-hello-world.ts b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-hello-world.ts new file mode 100644 index 00000000..e658c455 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-hello-world.ts @@ -0,0 +1,26 @@ +import test from "ava"; +import { root, wallet, rpcUrl } from "./util.js"; +import { Contract, networks } from "test-hello-world"; + +const contract = new Contract({ ...networks.standalone, rpcUrl, wallet }); + +test("hello", async (t) => { + t.deepEqual((await contract.hello({ world: "tests" })).result, ["Hello", "tests"]); +}); + +test("auth", async (t) => { + t.deepEqual( + (await contract.auth({ + addr: root.keypair.publicKey(), + world: 'lol' + })).result, + root.keypair.publicKey() + ) +}); + +test("inc", async (t) => { + const { result: startingBalance } = await contract.getCount() + const inc = await contract.inc() + t.is((await inc.signAndSend()).result, startingBalance + 1) + t.is((await contract.getCount()).result, startingBalance + 1) +}); diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/src/test-methods-as-args.ts b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-methods-as-args.ts new file mode 100644 index 00000000..afa3b651 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-methods-as-args.ts @@ -0,0 +1,12 @@ +import test from "ava"; +import { wallet, rpcUrl } from "./util.js"; +import { Contract, networks } from "test-hello-world"; + +const contract = new Contract({ ...networks.standalone, rpcUrl, wallet }); + +// this test checks that apps can pass methods as arguments to other methods and have them still work +const hello = contract.hello + +test("hello", async (t) => { + t.deepEqual((await hello({ world: "tests" })).result, ["Hello", "tests"]); +}); diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/src/test-swap.ts b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-swap.ts 
new file mode 100644 index 00000000..a473cb4d --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/src/test-swap.ts @@ -0,0 +1,134 @@ +import test from "ava" +import { SorobanRpc, xdr } from '@stellar/stellar-sdk' +import { wallet, rpcUrl, alice, bob, networkPassphrase, root, Wallet } from "./util.js" +import { Contract as Token } from "token" +import { Contract as Swap, networks, NeedsMoreSignaturesError } from "test-swap" +import fs from "node:fs" + +const tokenAId = fs.readFileSync(new URL("../contract-id-token-a.txt", import.meta.url), "utf8").trim() +const tokenBId = fs.readFileSync(new URL("../contract-id-token-b.txt", import.meta.url), "utf8").trim() + +// `root` is the invoker of all contracts +const tokenA = new Token({ + contractId: tokenAId, + networkPassphrase, + rpcUrl, + wallet, +}) +const tokenB = new Token({ + contractId: tokenBId, + networkPassphrase, + rpcUrl, + wallet, +}) +function swapContractAs(invoker: typeof root | typeof alice | typeof bob) { + return new Swap({ + ...networks.standalone, + rpcUrl, + wallet: new Wallet(invoker.keypair.publicKey()), + }) +} + +const amountAToSwap = 2n +const amountBToSwap = 1n +const alicePk = alice.keypair.publicKey() +const bobPk = bob.keypair.publicKey() + +test('calling `signAndSend()` too soon throws descriptive error', async t => { + const swapContract = swapContractAs(root) + const tx = await swapContract.swap({ + a: alicePk, + b: bobPk, + token_a: tokenAId, + token_b: tokenBId, + amount_a: amountAToSwap, + min_a_for_b: amountAToSwap, + amount_b: amountBToSwap, + min_b_for_a: amountBToSwap, + }) + const error = await t.throwsAsync(tx.signAndSend()) + t.true(error instanceof NeedsMoreSignaturesError, `error is not of type 'NeedsMoreSignaturesError'; instead it is of type '${error?.constructor.name}'`) + if (error) t.regex(error.message, /needsNonInvokerSigningBy/) +}) + +test('alice swaps bob 10 A for 1 B', async t => { + const swapContractAsRoot = swapContractAs(root) + const [ + { result: aliceStartingABalance }, + { result: aliceStartingBBalance }, + { result: bobStartingABalance }, + { result: bobStartingBBalance }, + ] = await Promise.all([ + tokenA.balance({ id: alicePk }), + tokenB.balance({ id: alicePk }), + tokenA.balance({ id: bobPk }), + tokenB.balance({ id: bobPk }), + ]) + t.true(aliceStartingABalance >= amountAToSwap, `alice does not have enough Token A! aliceStartingABalance: ${aliceStartingABalance}`) + t.true(bobStartingBBalance >= amountBToSwap, `bob does not have enough Token B! 
bobStartingBBalance: ${bobStartingBBalance}`) + + const tx = await swapContractAsRoot.swap({ + a: alicePk, + b: bobPk, + token_a: tokenAId, + token_b: tokenBId, + amount_a: amountAToSwap, + min_a_for_b: amountAToSwap, + amount_b: amountBToSwap, + min_b_for_a: amountBToSwap, + }) + + const needsNonInvokerSigningBy = await tx.needsNonInvokerSigningBy() + t.is(needsNonInvokerSigningBy.length, 2) + t.is(needsNonInvokerSigningBy.indexOf(alicePk), 0, 'needsNonInvokerSigningBy does not have alice\'s public key!') + t.is(needsNonInvokerSigningBy.indexOf(bobPk), 1, 'needsNonInvokerSigningBy does not have bob\'s public key!') + + + // root serializes & sends to alice + const jsonFromRoot = tx.toJSON() + const txAlice = swapContractAs(alice).fromJSON.swap(jsonFromRoot) + await txAlice.signAuthEntries() + + // alice serializes & sends to bob + const jsonFromAlice = txAlice.toJSON() + const txBob = swapContractAs(bob).fromJSON.swap(jsonFromAlice) + await txBob.signAuthEntries() + + // bob serializes & sends back to root + const jsonFromBob = txBob.toJSON() + const txRoot = swapContractAsRoot.fromJSON.swap(jsonFromBob) + const result = await txRoot.signAndSend() + + t.truthy(result.sendTransactionResponse, `tx failed: ${JSON.stringify(result, null, 2)}`) + t.is(result.sendTransactionResponse!.status, 'PENDING', `tx failed: ${JSON.stringify(result, null, 2)}`) + t.truthy(result.getTransactionResponseAll?.length, `tx failed: ${JSON.stringify(result.getTransactionResponseAll, null, 2)}`) + t.not(result.getTransactionResponse!.status, 'FAILED', `tx failed: ${JSON.stringify( + ((result.getTransactionResponse as SorobanRpc.Api.GetFailedTransactionResponse) + .resultXdr.result().value() as xdr.OperationResult[] + ).map(op => + op.value()?.value().switch() + ), null, 2)}` + ) + t.is( + result.getTransactionResponse!.status, + SorobanRpc.Api.GetTransactionStatus.SUCCESS, + `tx failed: ${JSON.stringify(result.getTransactionResponse, null, 2)}` + ) + + t.is( + (await tokenA.balance({ id: alicePk })).result, + aliceStartingABalance - amountAToSwap + ) + t.is( + (await tokenB.balance({ id: alicePk })).result, + aliceStartingBBalance + amountBToSwap + ) + t.is( + (await tokenA.balance({ id: bobPk })).result, + bobStartingABalance + amountAToSwap + ) + t.is( + (await tokenB.balance({ id: bobPk })).result, + bobStartingBBalance - amountBToSwap + ) +}) diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/src/util.ts b/cmd/crates/soroban-spec-typescript/ts-tests/src/util.ts new file mode 100644 index 00000000..d5539fd1 --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/src/util.ts @@ -0,0 +1,57 @@ +import { spawnSync } from "node:child_process"; +import { Keypair, TransactionBuilder, hash } from "@stellar/stellar-sdk"; +import { Address } from 'test-custom-types' + +const rootKeypair = Keypair.fromSecret(spawnSync("./soroban", ["keys", "show", "root"], { shell: true, encoding: "utf8" }).stdout.trim()); +const aliceKeypair = Keypair.fromSecret(spawnSync("./soroban", ["keys", "show", "alice"], { shell: true, encoding: "utf8" }).stdout.trim()); +const bobKeypair = Keypair.fromSecret(spawnSync("./soroban", ["keys", "show", "bob"], { shell: true, encoding: "utf8" }).stdout.trim()); + +export const root = { + keypair: rootKeypair, + address: Address.fromString(rootKeypair.publicKey()), +} + +export const alice = { + keypair: aliceKeypair, + address: Address.fromString(aliceKeypair.publicKey()), +} + +export const bob = { + keypair: bobKeypair, + address: Address.fromString(bobKeypair.publicKey()), +} + 
+function getKeypair(pk: string): Keypair { + return Keypair.fromSecret({ + [root.keypair.publicKey()]: root.keypair.secret(), + [alice.keypair.publicKey()]: alice.keypair.secret(), + [bob.keypair.publicKey()]: bob.keypair.secret(), + }[pk]) +} + +export const rpcUrl = process.env.SOROBAN_RPC_URL ?? "http://localhost:8000/"; +export const networkPassphrase = process.env.SOROBAN_NETWORK_PASSPHRASE ?? "Standalone Network ; February 2017"; + +export class Wallet { + constructor(private publicKey: string) {} + isConnected = () => Promise.resolve(true) + isAllowed = () => Promise.resolve(true) + getUserInfo = () => Promise.resolve({ publicKey: this.publicKey }) + signTransaction = async (tx: string) => { + const t = TransactionBuilder.fromXDR(tx, networkPassphrase); + t.sign(getKeypair(this.publicKey)); + return t.toXDR(); + } + signAuthEntry = async ( + entryXdr: string, + opts?: { + accountToSign?: string, + } + ): Promise => { + return getKeypair(opts?.accountToSign ?? this.publicKey) + .sign(hash(Buffer.from(entryXdr, "base64"))) + .toString('base64') + } +} + +export const wallet = new Wallet(root.keypair.publicKey()) diff --git a/cmd/crates/soroban-spec-typescript/ts-tests/tsconfig.json b/cmd/crates/soroban-spec-typescript/ts-tests/tsconfig.json new file mode 100644 index 00000000..119437bc --- /dev/null +++ b/cmd/crates/soroban-spec-typescript/ts-tests/tsconfig.json @@ -0,0 +1,101 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + /* Language and Environment */ + "target": "esnext", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. 
*/ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + /* Modules */ + "module": "nodenext", /* Specify what module code is generated. */ + // "rootDir": "./", /* Specify the root folder within your source files. */ + "moduleResolution": "nodenext", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./build", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. 
*/ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. 
*/ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. */ + } +} diff --git a/cmd/crates/soroban-test/Cargo.toml b/cmd/crates/soroban-test/Cargo.toml new file mode 100644 index 00000000..649a37e5 --- /dev/null +++ b/cmd/crates/soroban-test/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "soroban-test" +description = "Soroban Test Framework" +homepage = "https://github.com/stellar/soroban-test" +repository = "https://github.com/stellar/soroban-test" +authors = ["Stellar Development Foundation "] +license = "Apache-2.0" +readme = "README.md" +version = "20.2.0" +edition = "2021" +rust-version.workspace = true +autobins = false + + +[lib] +crate-type = ["rlib", "cdylib"] + + +[dependencies] +soroban-env-host = { workspace = true } +soroban-spec = { workspace = true } +soroban-spec-tools = { workspace = true } +soroban-ledger-snapshot = { workspace = true } +stellar-strkey = { workspace = true } +soroban-sdk = { workspace = true } +sep5 = { workspace = true } +soroban-cli = { workspace = true } + +thiserror = "1.0.31" +sha2 = "0.10.6" +assert_cmd = "2.0.4" +assert_fs = "1.0.7" +predicates = "2.1.5" +fs_extra = "1.3.0" + +[dev-dependencies] +serde_json = "1.0.93" +which = { workspace = true } +tokio = "1.28.1" + +[features] +integration = [] diff --git a/cmd/crates/soroban-test/README.md b/cmd/crates/soroban-test/README.md new file mode 100644 index 00000000..3f8cdc9f --- /dev/null +++ b/cmd/crates/soroban-test/README.md @@ -0,0 +1,54 @@ +Soroban Test +============ + +Test framework wrapping Soroban CLI. + +Provides a way to run tests against a local sandbox; running against RPC endpoint _coming soon_. + + +Overview +======== + +- `TestEnv` is a test environment for running tests isolated from each other. +- `TestEnv::with_default` invokes a closure, which is passed a reference to a random `TestEnv`. +- `TestEnv::new_assert_cmd` creates an `assert_cmd::Command` for a given subcommand and sets the current + directory to be the same as `TestEnv`. +- `TestEnv::cmd` is a generic function which parses a command from a string. + Note, however, that it uses `shlex` to tokenize the string. This can cause issues + for commands which contain strings with `"`s. For example, `{"hello": "world"}` becomes + `{hello:world}`. For that reason it's recommended to use `TestEnv::cmd_arr` instead. +- `TestEnv::cmd_arr` is a generic function which takes an array of `&str` which is passed directly to clap. + This is the preferred way since it ensures no string parsing footguns. +- `TestEnv::invoke` a convenience function for using the invoke command. 
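+
+For instance, a minimal sketch of the `cmd_arr` approach (the exact argument list here is illustrative):
+
+```rs
+use soroban_cli::commands::contract::invoke;
+use soroban_test::TestEnv;
+
+// Parse an `invoke` command from explicit arguments; no shlex tokenization,
+// so embedded quotes and JSON arguments survive untouched.
+let env = TestEnv::default();
+let cmd: invoke::Cmd = env.cmd_arr(&["--id", "1", "--", "hello", "--to", "friend"]);
+```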
+
+
+Example
+=======
+
+```rs
+use soroban_test::{TestEnv, Wasm};
+
+const WASM: &Wasm = &Wasm::Release("soroban_hello_world_contract");
+const FRIEND: &str = "friend";
+
+#[test]
+fn invoke() {
+    TestEnv::with_default(|workspace| {
+        assert_eq!(
+            format!("[\"Hello\",\"{FRIEND}\"]"),
+            workspace
+                .invoke(&[
+                    "--id",
+                    "1",
+                    "--wasm",
+                    &WASM.path().to_string_lossy(),
+                    "--",
+                    "hello",
+                    "--to",
+                    FRIEND,
+                ])
+                .unwrap()
+        );
+    });
+}
+```
diff --git a/cmd/crates/soroban-test/src/lib.rs b/cmd/crates/soroban-test/src/lib.rs new file mode 100644 index 00000000..bda6ec42 --- /dev/null +++ b/cmd/crates/soroban-test/src/lib.rs @@ -0,0 +1,229 @@
+//! **Soroban Test** - Test framework for invoking Soroban externally.
+//!
+//! Currently Soroban provides a mock test environment for writing unit tests.
+//!
+//! However, it does not provide a way to run tests against a local sandbox or RPC endpoint.
+//!
+//! ## Overview
+//!
+//! - `TestEnv` is a test environment for running tests isolated from each other.
+//! - `TestEnv::with_default` invokes a closure, which is passed a reference to a random `TestEnv`.
+//! - `TestEnv::new_assert_cmd` creates an `assert_cmd::Command` for a given subcommand and sets the current
+//!   directory to be the same as `TestEnv`.
+//! - `TestEnv::cmd` is a generic function which parses a command from a string.
+//!   Note, however, that it uses `shlex` to tokenize the string. This can cause issues
+//!   for commands which contain strings with `"`s. For example, `{"hello": "world"}` becomes
+//!   `{hello:world}`. For that reason it's recommended to use `TestEnv::cmd_arr` instead.
+//! - `TestEnv::cmd_arr` is a generic function which takes an array of `&str` which is passed directly to clap.
+//!   This is the preferred way since it ensures no string parsing footguns.
+//! - `TestEnv::invoke` is a convenience function for using the invoke command.
+//!
+#![allow(
+    clippy::missing_errors_doc,
+    clippy::must_use_candidate,
+    clippy::missing_panics_doc
+)]
+use std::{ffi::OsString, fmt::Display, path::Path};
+
+use assert_cmd::{assert::Assert, Command};
+use assert_fs::{fixture::FixtureError, prelude::PathChild, TempDir};
+use fs_extra::dir::CopyOptions;
+
+use soroban_cli::{
+    commands::{config, contract, contract::invoke, global, keys},
+    CommandParser, Pwd,
+};
+
+mod wasm;
+pub use wasm::Wasm;
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error(transparent)]
+    TempDir(#[from] FixtureError),
+
+    #[error(transparent)]
+    FsError(#[from] fs_extra::error::Error),
+
+    #[error(transparent)]
+    Invoke(#[from] invoke::Error),
+}
+
+/// A `TestEnv` is a contained process for a specific test, with its own ENV and
+/// its own `TempDir` where it will save test-specific configuration.
+pub struct TestEnv {
+    pub temp_dir: TempDir,
+}
+
+impl Default for TestEnv {
+    fn default() -> Self {
+        Self::new().unwrap()
+    }
+}
+
+impl TestEnv {
+    /// Execute a closure which is passed a reference to the `TestEnv`.
+    /// `TempDir` implements the `Drop` trait ensuring that the temporary directory
+    /// it creates is deleted when the `TestEnv` is dropped. This pattern ensures
+    /// that the `TestEnv` cannot be dropped by the closure. For this reason, it's
+    /// recommended to use `TestEnv::with_default` instead of `new` or `default`.
+    ///
+    /// ```rust,no_run
+    /// use soroban_test::TestEnv;
+    /// TestEnv::with_default(|env| {
+    ///     env.new_assert_cmd("contract").args(&["invoke", "--id", "1", "--", "hello", "--world=world"]).assert().success();
+    /// });
+    /// ```
+    ///
+    pub fn with_default<F: FnOnce(&TestEnv)>(f: F) {
+        let test_env = TestEnv::default();
+        f(&test_env);
+    }
+    pub fn new() -> Result<TestEnv, Error> {
+        let this = TempDir::new().map(|temp_dir| TestEnv { temp_dir })?;
+        std::env::set_var("XDG_CONFIG_HOME", this.temp_dir.as_os_str());
+        this.new_assert_cmd("keys")
+            .arg("generate")
+            .arg("test")
+            .arg("-d")
+            .arg("--no-fund")
+            .assert();
+        std::env::set_var("SOROBAN_ACCOUNT", "test");
+        Ok(this)
+    }
+
+    /// Creates a new `assert_cmd::Command` for a given subcommand and sets the current directory
+    /// to be the internal `temp_dir`.
+    pub fn new_assert_cmd(&self, subcommand: &str) -> Command {
+        let mut this = Command::cargo_bin("soroban").unwrap_or_else(|_| Command::new("soroban"));
+        this.arg("-q");
+        this.arg(subcommand);
+        this.current_dir(&self.temp_dir);
+        this
+    }
+
+    /// Parses a `&str` into a command and sets the pwd to be the same as the current `TestEnv`.
+    /// Uses shlex under the hood and thus has issues parsing strings with embedded `"`s.
+    /// Thus `TestEnv::cmd_arr` is recommended instead.
+    pub fn cmd<T: CommandParser<T>>(&self, args: &str) -> T {
+        Self::cmd_with_pwd(args, self.dir())
+    }
+
+    /// Same as `TestEnv::cmd` but uses the given `pwd` instead of the current `TestEnv`'s directory.
+    pub fn cmd_with_pwd<T: CommandParser<T>>(args: &str, pwd: &Path) -> T {
+        let args = format!("--config-dir={pwd:?} {args}");
+        T::parse(&args).unwrap()
+    }
+
+    /// Same as `TestEnv::cmd_arr` but uses the given `pwd` instead of the current `TestEnv`'s directory.
+    pub fn cmd_arr_with_pwd<T: CommandParser<T>>(args: &[&str], pwd: &Path) -> T {
+        let mut cmds = vec!["--config-dir", pwd.to_str().unwrap()];
+        cmds.extend_from_slice(args);
+        T::parse_arg_vec(&cmds).unwrap()
+    }
+
+    /// Parses a command using an array of `&str`s, which passes the strings directly to clap,
+    /// avoiding some issues `cmd` has with shlex. Uses the current `TestEnv` pwd.
+    pub fn cmd_arr<T: CommandParser<T>>(&self, args: &[&str]) -> T {
+        Self::cmd_arr_with_pwd(args, self.dir())
+    }
+
+    /// A convenience method for using the invoke command.
+    pub async fn invoke<I: AsRef<str>>(&self, command_str: &[I]) -> Result<String, invoke::Error> {
+        let cmd = contract::invoke::Cmd::parse_arg_vec(
+            &command_str
+                .iter()
+                .map(AsRef::as_ref)
+                .filter(|s| !s.is_empty())
+                .collect::<Vec<&str>>(),
+        )
+        .unwrap();
+        self.invoke_cmd(cmd).await
+    }
+
+    /// Invoke an already parsed invoke command.
+    pub async fn invoke_cmd(&self, mut cmd: invoke::Cmd) -> Result<String, invoke::Error> {
+        cmd.set_pwd(self.dir());
+        cmd.run_against_rpc_server(&global::Args {
+            locator: config::locator::Args {
+                global: false,
+                config_dir: None,
+            },
+            filter_logs: Vec::default(),
+            quiet: false,
+            verbose: false,
+            very_verbose: false,
+            list: false,
+        })
+        .await
+    }
+
+    /// Reference to current directory of the `TestEnv`.
+    pub fn dir(&self) -> &TempDir {
+        &self.temp_dir
+    }
+
+    /// Returns the public key corresponding to the test key's `hd_path`.
+    pub fn test_address(&self, hd_path: usize) -> String {
+        self.cmd::<keys::address::Cmd>(&format!("--hd-path={hd_path}"))
+            .public_key()
+            .unwrap()
+            .to_string()
+    }
+
+    /// Returns the private key corresponding to the test key's `hd_path`.
+    pub fn test_show(&self, hd_path: usize) -> String {
+        self.cmd::<keys::show::Cmd>(&format!("--hd-path={hd_path}"))
+            .private_key()
+            .unwrap()
+            .to_string()
+    }
+
+    /// Copy the contents of the current `TestEnv` to another `TestEnv`.
+    pub fn fork(&self) -> Result<TestEnv, Error> {
+        let this = TestEnv::new()?;
+        self.save(&this.temp_dir)?;
+        Ok(this)
+    }
+
+    /// Save the current state of the `TestEnv` to the given directory.
+    pub fn save(&self, dst: &Path) -> Result<(), Error> {
+        fs_extra::dir::copy(&self.temp_dir, dst, &CopyOptions::new())?;
+        Ok(())
+    }
+}
+
+pub fn temp_ledger_file() -> OsString {
+    TempDir::new()
+        .unwrap()
+        .child("ledger.json")
+        .as_os_str()
+        .into()
+}
+
+pub trait AssertExt {
+    fn stdout_as_str(&self) -> String;
+}
+
+impl AssertExt for Assert {
+    fn stdout_as_str(&self) -> String {
+        String::from_utf8(self.get_output().stdout.clone())
+            .expect("failed to make str")
+            .trim()
+            .to_owned()
+    }
+}
+pub trait CommandExt {
+    fn json_arg<A>(&mut self, j: A) -> &mut Self
+    where
+        A: Display;
+}
+
+impl CommandExt for Command {
+    fn json_arg<A>(&mut self, j: A) -> &mut Self
+    where
+        A: Display,
+    {
+        self.arg(OsString::from(j.to_string()))
+    }
+}
diff --git a/cmd/crates/soroban-test/src/wasm.rs b/cmd/crates/soroban-test/src/wasm.rs new file mode 100644 index 00000000..d03114b9 --- /dev/null +++ b/cmd/crates/soroban-test/src/wasm.rs @@ -0,0 +1,61 @@
+use std::{fmt::Display, fs, path::PathBuf};
+
+use sha2::{Digest, Sha256};
+use soroban_env_host::xdr;
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error(transparent)]
+    Xdr(#[from] xdr::Error),
+}
+
+pub enum Wasm<'a> {
+    Release(&'a str),
+    Custom(&'a str, &'a str),
+}
+
+fn find_target_dir() -> Option<PathBuf> {
+    let path = std::env::current_dir().unwrap();
+    for parent in path.ancestors() {
+        let path = parent.join("target");
+        if path.is_dir() {
+            return Some(path);
+        }
+    }
+    None
+}
+
+impl Wasm<'_> {
+    /// # Panics
+    ///
+    /// If the compiled `.wasm` file is not found.
+    pub fn path(&self) -> PathBuf {
+        let path = find_target_dir().unwrap().join("wasm32-unknown-unknown");
+        let mut path = match self {
+            Wasm::Release(name) => path.join("release").join(name),
+            Wasm::Custom(profile, name) => path.join(profile).join(name),
+        };
+        path.set_extension("wasm");
+        assert!(path.is_file(), "File not found: {}. 
run 'make build-test-wasms' to generate .wasm files before running this test", path.display()); + std::env::current_dir().unwrap().join(path) + } + + /// # Panics + /// + /// # if not found + pub fn bytes(&self) -> Vec { + fs::read(self.path()).unwrap() + } + + /// # Errors + /// + pub fn hash(&self) -> Result { + Ok(xdr::Hash(Sha256::digest(self.bytes()).into())) + } +} + +impl Display for Wasm<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.path().display()) + } +} diff --git a/cmd/crates/soroban-test/tests/fixtures/args/world b/cmd/crates/soroban-test/tests/fixtures/args/world new file mode 100644 index 00000000..04fea064 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/args/world @@ -0,0 +1 @@ +world \ No newline at end of file diff --git a/cmd/crates/soroban-test/tests/fixtures/hello/Cargo.lock b/cmd/crates/soroban-test/tests/fixtures/hello/Cargo.lock new file mode 100644 index 00000000..dee1dea7 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/hello/Cargo.lock @@ -0,0 +1,7 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "soroban-hello" +version = "0.1.0" diff --git a/cmd/crates/soroban-test/tests/fixtures/hello/Cargo.toml b/cmd/crates/soroban-test/tests/fixtures/hello/Cargo.toml new file mode 100644 index 00000000..01b80b0f --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/hello/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "soroban-hello" +version = "20.2.0" +edition = "2021" +publish = false + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/cmd/crates/soroban-test/tests/fixtures/hello/src/main.rs b/cmd/crates/soroban-test/tests/fixtures/hello/src/main.rs new file mode 100644 index 00000000..e7a11a96 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/hello/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("Hello, world!"); +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-jsons/get-events.json b/cmd/crates/soroban-test/tests/fixtures/test-jsons/get-events.json new file mode 100644 index 00000000..1fe8e42f --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-jsons/get-events.json @@ -0,0 +1,57 @@ +{ + "latestLedger": 43601285, + "events": [ + { + "type": "contract", + "ledger": "40", + "ledgerClosedAt": "2022-12-14T01:01:20Z", + "contractId": "CBXL4AIUVYK7OLYYP4C5A3OLM2ZCXWLSDB2VZG2GI2YDJK4WD7A5LTHT", + "id": "0000000171798695937-0000000001", + "pagingToken": "0000000171798695937-0000000001", + "topic": [ + "AAAABQAAAAdDT1VOVEVSAA==", + "AAAABQAAAAlpbmNyZW1lbnQAAAA=" + ], + "value": "AAAAAQAAAAE=" + }, + { + "type": "system", + "ledger": "43601283", + "ledgerClosedAt": "2022-11-16T16:10:41Z", + "contractId": "CDR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OO5Z", + "id": "0187266084548644865-0000000003", + "pagingToken": "187266084548644865-3", + "topic": [ + "AAAABQAAAAh0cmFuc2Zlcg==", + "AAAAAQB6Mcc=" + ], + "value": "AAAABQAAAApHaWJNb255UGxzAAA=" + }, + { + "type": "contract", + "ledger": "43601284", + "ledgerClosedAt": "2022-11-16T16:10:46Z", + "contractId": "CDR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OO5Z", + "id": "0187266088843612161-0000000003", + "pagingToken": "187266088843612161-3", + "topic": [ + "AAAABQAAAAh0cmFuc2Zlcg==", + "AAAAAQB6Mcc=" + ], + "value": "AAAABQAAAApHaWJNb255UGxzAAA=" + }, + { + "type": "system", + "ledger": "43601285", + "ledgerClosedAt": "2022-11-16T16:10:51Z", + 
"contractId": "CCR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OTE2", + "id": "0187266093138579457-0000000003", + "pagingToken": "187266093138579457-3", + "topic": [ + "AAAABQAAAAh0cmFuc2Zlcg==", + "AAAAAQB6Mcc=" + ], + "value": "AAAABQAAAApHaWJNb255UGxzAAA=" + } + ] +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/custom_type/Cargo.toml b/cmd/crates/soroban-test/tests/fixtures/test-wasms/custom_type/Cargo.toml new file mode 100644 index 00000000..67df45b6 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/custom_type/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "test_custom_types" +version = "20.2.0" +authors = ["Stellar Development Foundation "] +license = "Apache-2.0" +edition = "2021" +publish = false +rust-version.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] +doctest = false + +[dependencies] +soroban-sdk = { workspace = true } + +[dev-dependencies] +soroban-sdk = { workspace = true, features = ["testutils"]} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/custom_type/src/lib.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/custom_type/src/lib.rs new file mode 100644 index 00000000..9e4b442b --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/custom_type/src/lib.rs @@ -0,0 +1,187 @@ +#![no_std] +use soroban_sdk::{ + contract, contracterror, contractimpl, contracttype, symbol_short, vec, Address, Bytes, BytesN, + Env, Map, String, Symbol, Val, Vec, I256, U256, +}; + +#[contract] +pub struct Contract; + +/// This is from the rust doc above the struct Test +#[contracttype] +pub struct Test { + pub a: u32, + pub b: bool, + pub c: Symbol, +} + +#[contracttype] +pub enum SimpleEnum { + First, + Second, + Third, +} + +#[contracttype] +#[derive(Clone, Copy)] +// The `repr` attribute is here to specify the memory alignment for this type +#[repr(u32)] +pub enum RoyalCard { + // TODO: create the fields here for your `RoyalCard` type + Jack = 11, // delete this + Queen = 12, // delete this + King = 13, // delete this +} + +#[contracttype] +pub struct TupleStruct(Test, SimpleEnum); + +#[contracttype] +pub enum ComplexEnum { + Struct(Test), + Tuple(TupleStruct), + Enum(SimpleEnum), + Asset(Address, i128), + Void, +} + +#[contracterror] +#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord)] +#[repr(u32)] +pub enum Error { + /// Please provide an odd number + NumberMustBeOdd = 1, +} +#[contractimpl] +impl Contract { + pub fn hello(_env: Env, hello: Symbol) -> Symbol { + hello + } + + pub fn woid(_env: Env) { + // do nothing + } + + pub fn val(_env: Env) -> Val { + Val::default() + } + + pub fn u32_fail_on_even(_env: Env, u32_: u32) -> Result { + if u32_ % 2 == 1 { + Ok(u32_) + } else { + Err(Error::NumberMustBeOdd) + } + } + + pub fn u32_(_env: Env, u32_: u32) -> u32 { + u32_ + } + + pub fn i32_(_env: Env, i32_: i32) -> i32 { + i32_ + } + + pub fn i64_(_env: Env, i64_: i64) -> i64 { + i64_ + } + + /// Example contract method which takes a struct + pub fn strukt_hel(env: Env, strukt: Test) -> Vec { + vec![&env, symbol_short!("Hello"), strukt.c] + } + + pub fn strukt(_env: Env, strukt: Test) -> Test { + strukt + } + + pub fn simple(_env: Env, simple: SimpleEnum) -> SimpleEnum { + simple + } + + pub fn complex(_env: Env, complex: ComplexEnum) -> ComplexEnum { + complex + } + + pub fn addresse(_env: Env, addresse: Address) -> Address { + addresse + } + + pub fn bytes(_env: Env, bytes: Bytes) -> Bytes { + bytes + } + + pub fn bytes_n(_env: Env, bytes_n: BytesN<9>) -> BytesN<9> { + bytes_n + } + + pub fn 
card(_env: Env, card: RoyalCard) -> RoyalCard { + card + } + + pub fn boolean(_: Env, boolean: bool) -> bool { + boolean + } + + /// Negates a boolean value + pub fn not(_env: Env, boolean: bool) -> bool { + !boolean + } + + pub fn i128(_env: Env, i128: i128) -> i128 { + i128 + } + + pub fn u128(_env: Env, u128: u128) -> u128 { + u128 + } + + pub fn multi_args(_env: Env, a: u32, b: bool) -> u32 { + if b { + a + } else { + 0 + } + } + + pub fn map(_env: Env, map: Map) -> Map { + map + } + + pub fn vec(_env: Env, vec: Vec) -> Vec { + vec + } + + pub fn tuple(_env: Env, tuple: (Symbol, u32)) -> (Symbol, u32) { + tuple + } + + /// Example of an optional argument + pub fn option(_env: Env, option: Option) -> Option { + option + } + + pub fn u256(_env: Env, u256: U256) -> U256 { + u256 + } + + pub fn i256(_env: Env, i256: I256) -> I256 { + i256 + } + + pub fn string(_env: Env, string: String) -> String { + string + } + + pub fn tuple_strukt(_env: Env, tuple_strukt: TupleStruct) -> TupleStruct { + tuple_strukt + } + + // pub fn timepoint(_env: Env, timepoint: TimePoint) -> TimePoint { + // timepoint + // } + + // pub fn duration(_env: Env, duration: Duration) -> Duration { + // duration + // } +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/hello_world/Cargo.toml b/cmd/crates/soroban-test/tests/fixtures/test-wasms/hello_world/Cargo.toml new file mode 100644 index 00000000..e5ced55f --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/hello_world/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "test_hello_world" +version = "20.2.0" +authors = ["Stellar Development Foundation "] +license = "Apache-2.0" +edition = "2021" +publish = false +rust-version.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] +doctest = false + +[dependencies] +soroban-sdk = { workspace = true } + +[dev-dependencies] +soroban-sdk = { workspace = true, features = ["testutils"]} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/hello_world/src/lib.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/hello_world/src/lib.rs new file mode 100644 index 00000000..40006a1b --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/hello_world/src/lib.rs @@ -0,0 +1,86 @@ +#![no_std] +use soroban_sdk::{ + contract, contractimpl, log, symbol_short, vec, Address, BytesN, Env, String, Symbol, Vec, +}; + +const COUNTER: Symbol = symbol_short!("COUNTER"); + +#[contract] +pub struct Contract; + +#[contractimpl] +impl Contract { + pub fn hello(env: Env, world: Symbol) -> Vec { + vec![&env, symbol_short!("Hello"), world] + } + + pub fn world(env: Env, hello: Symbol) -> Vec { + vec![&env, symbol_short!("Hello"), hello] + } + + pub fn not(env: Env, boolean: bool) -> Vec { + vec![&env, !boolean] + } + + pub fn auth(env: Env, addr: Address, world: Symbol) -> Address { + addr.require_auth(); + // Emit test event + env.events().publish(("auth",), world); + + addr + } + + // get current count + pub fn get_count(env: Env) -> u32 { + env.storage().persistent().get(&COUNTER).unwrap_or(0) + } + + // increment count and return new one + pub fn inc(env: Env) -> u32 { + let mut count: u32 = env.storage().persistent().get(&COUNTER).unwrap_or(0); // Panic if the value of COUNTER is not u32. + log!(&env, "count: {}", count); + + // Increment the count. + count += 1; + + // Save the count. 
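+        // (Persistent entries outlive the invocation, subject to their TTL.)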
+ env.storage().persistent().set(&COUNTER, &count); + count + } + + pub fn prng_u64_in_range(env: Env, low: u64, high: u64) -> u64 { + env.prng().gen_range(low..=high) + } + + pub fn upgrade_contract(env: Env, hash: BytesN<32>) { + env.deployer().update_current_contract_wasm(hash); + } + + #[allow(unused_variables)] + pub fn multi_word_cmd(env: Env, contract_owner: String) {} + /// Logs a string with `hello ` in front. + pub fn log(env: Env, str: Symbol) { + env.events().publish( + (Symbol::new(&env, "hello"), Symbol::new(&env, "")), + str.clone(), + ); + log!(&env, "hello {}", str); + } +} + +#[cfg(test)] +mod test { + use soroban_sdk::{symbol_short, vec, Env}; + + use crate::{Contract, ContractClient}; + + #[test] + fn test_hello() { + let env = Env::default(); + let contract_id = env.register_contract(None, Contract); + let client = ContractClient::new(&env, &contract_id); + let world = symbol_short!("world"); + let res = client.hello(&world); + assert_eq!(res, vec![&env, symbol_short!("Hello"), world]); + } +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/swap/Cargo.toml b/cmd/crates/soroban-test/tests/fixtures/test-wasms/swap/Cargo.toml new file mode 100644 index 00000000..f2465e22 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/swap/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "test_swap" +version.workspace = true +authors = ["Stellar Development Foundation "] +license = "Apache-2.0" +edition = "2021" +publish = false +rust-version.workspace = true + +[lib] +crate-type = ["cdylib"] +doctest = false + +[dependencies] +soroban-sdk = { workspace = true } + +[dev_dependencies] +soroban-sdk = { workspace = true, features = ["testutils"] } diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/swap/src/lib.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/swap/src/lib.rs new file mode 100644 index 00000000..528b8422 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/swap/src/lib.rs @@ -0,0 +1,77 @@ +//! This contract performs an atomic token swap between two parties. +//! Parties don't need to know each other and their signatures may be matched +//! off-chain. +//! This example demonstrates how multi-party authorization can be implemented. +#![no_std] + +use soroban_sdk::{contract, contractimpl, token, Address, Env, IntoVal}; + +#[contract] +pub struct AtomicSwapContract; + +#[contractimpl] +impl AtomicSwapContract { + // Swap token A for token B atomically. Settle for the minimum requested price + // for each party (this is an arbitrary choice; both parties could have + // received the full amount as well). + pub fn swap( + env: Env, + a: Address, + b: Address, + token_a: Address, + token_b: Address, + amount_a: i128, + min_b_for_a: i128, + amount_b: i128, + min_a_for_b: i128, + ) { + // Verify preconditions on the minimum price for both parties. + if amount_b < min_b_for_a { + panic!("not enough token B for token A"); + } + if amount_a < min_a_for_b { + panic!("not enough token A for token B"); + } + // Require authorization for a subset of arguments specific to a party. + // Notice, that arguments are symmetric - there is no difference between + // `a` and `b` in the call and hence their signatures can be used + // either for `a` or for `b` role. 
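+        // Each party authorizes only (send_token, receive_token, send_amount,
+        // min_receive_amount); the auth entry never mentions the counterparty, so
+        // the two signatures can be gathered independently and in any order.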
+ a.require_auth_for_args( + (token_a.clone(), token_b.clone(), amount_a, min_b_for_a).into_val(&env), + ); + b.require_auth_for_args( + (token_b.clone(), token_a.clone(), amount_b, min_a_for_b).into_val(&env), + ); + + // Perform the swap by moving tokens from a to b and from b to a. + move_token(&env, &token_a, &a, &b, amount_a, min_a_for_b); + move_token(&env, &token_b, &b, &a, amount_b, min_b_for_a); + } +} + +fn move_token( + env: &Env, + token: &Address, + from: &Address, + to: &Address, + max_spend_amount: i128, + transfer_amount: i128, +) { + let token = token::Client::new(env, token); + let contract_address = env.current_contract_address(); + // This call needs to be authorized by `from` address. It transfers the + // maximum spend amount to the swap contract's address in order to decouple + // the signature from `to` address (so that parties don't need to know each + // other). + token.transfer(from, &contract_address, &max_spend_amount); + // Transfer the necessary amount to `to`. + token.transfer(&contract_address, to, &transfer_amount); + // Refund the remaining balance to `from`. + token.transfer( + &contract_address, + from, + &(&max_spend_amount - &transfer_amount), + ); +} + +mod test; diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/swap/src/test.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/swap/src/test.rs new file mode 100644 index 00000000..cf7929ef --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/swap/src/test.rs @@ -0,0 +1,112 @@ +#![cfg(test)] +extern crate std; + +use super::*; +use soroban_sdk::{ + symbol_short, + testutils::{Address as _, AuthorizedFunction, AuthorizedInvocation}, + token, Address, Env, IntoVal, +}; +use token::Client as TokenClient; +use token::StellarAssetClient as TokenAdminClient; + +fn create_token_contract<'a>(e: &Env, admin: &Address) -> (TokenClient<'a>, TokenAdminClient<'a>) { + let contract_address = e.register_stellar_asset_contract(admin.clone()); + ( + TokenClient::new(e, &contract_address), + TokenAdminClient::new(e, &contract_address), + ) +} + +fn create_atomic_swap_contract(e: &Env) -> AtomicSwapContractClient { + AtomicSwapContractClient::new(e, &e.register_contract(None, AtomicSwapContract {})) +} + +#[test] +fn test_atomic_swap() { + let env = Env::default(); + env.mock_all_auths(); + + let a = Address::generate(&env); + let b = Address::generate(&env); + + let token_admin = Address::generate(&env); + + let (token_a, token_a_admin) = create_token_contract(&env, &token_admin); + let (token_b, token_b_admin) = create_token_contract(&env, &token_admin); + token_a_admin.mint(&a, &1000); + token_b_admin.mint(&b, &5000); + + let contract = create_atomic_swap_contract(&env); + + contract.swap( + &a, + &b, + &token_a.address, + &token_b.address, + &1000, + &4500, + &5000, + &950, + ); + + assert_eq!( + env.auths(), + std::vec![ + ( + a.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + contract.address.clone(), + symbol_short!("swap"), + ( + token_a.address.clone(), + token_b.address.clone(), + 1000_i128, + 4500_i128 + ) + .into_val(&env), + )), + sub_invocations: std::vec![AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token_a.address.clone(), + symbol_short!("transfer"), + (a.clone(), contract.address.clone(), 1000_i128,).into_val(&env), + )), + sub_invocations: std::vec![] + }] + } + ), + ( + b.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + contract.address.clone(), + symbol_short!("swap"), + ( + 
token_b.address.clone(), + token_a.address.clone(), + 5000_i128, + 950_i128 + ) + .into_val(&env), + )), + sub_invocations: std::vec![AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token_b.address.clone(), + symbol_short!("transfer"), + (b.clone(), contract.address.clone(), 5000_i128,).into_val(&env), + )), + sub_invocations: std::vec![] + }] + } + ), + ] + ); + + assert_eq!(token_a.balance(&a), 50); + assert_eq!(token_a.balance(&b), 950); + + assert_eq!(token_b.balance(&a), 4500); + assert_eq!(token_b.balance(&b), 500); +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/Cargo.toml b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/Cargo.toml new file mode 100644 index 00000000..3f32f139 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "test_token" +version.workspace = true +description = "Soroban standard token contract" +authors = ["Stellar Development Foundation "] +license = "Apache-2.0" +edition = "2021" +publish = false +rust-version.workspace = true + +[lib] +crate-type = ["cdylib"] + +[dependencies] +soroban-sdk = { workspace = true } +soroban-token-sdk = { workspace = true } + +[dev_dependencies] +soroban-sdk = { workspace = true, features = ["testutils"] } diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/admin.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/admin.rs new file mode 100644 index 00000000..a820bf04 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/admin.rs @@ -0,0 +1,18 @@ +use soroban_sdk::{Address, Env}; + +use crate::storage_types::DataKey; + +pub fn has_administrator(e: &Env) -> bool { + let key = DataKey::Admin; + e.storage().instance().has(&key) +} + +pub fn read_administrator(e: &Env) -> Address { + let key = DataKey::Admin; + e.storage().instance().get(&key).unwrap() +} + +pub fn write_administrator(e: &Env, id: &Address) { + let key = DataKey::Admin; + e.storage().instance().set(&key, id); +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/allowance.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/allowance.rs new file mode 100644 index 00000000..ad746871 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/allowance.rs @@ -0,0 +1,63 @@ +use crate::storage_types::{AllowanceDataKey, AllowanceValue, DataKey}; +use soroban_sdk::{Address, Env}; + +pub fn read_allowance(e: &Env, from: Address, spender: Address) -> AllowanceValue { + let key = DataKey::Allowance(AllowanceDataKey { from, spender }); + if let Some(allowance) = e.storage().temporary().get::<_, AllowanceValue>(&key) { + if allowance.expiration_ledger < e.ledger().sequence() { + AllowanceValue { + amount: 0, + expiration_ledger: allowance.expiration_ledger, + } + } else { + allowance + } + } else { + AllowanceValue { + amount: 0, + expiration_ledger: 0, + } + } +} + +pub fn write_allowance( + e: &Env, + from: Address, + spender: Address, + amount: i128, + expiration_ledger: u32, +) { + let allowance = AllowanceValue { + amount, + expiration_ledger, + }; + + if amount > 0 && expiration_ledger < e.ledger().sequence() { + panic!("expiration_ledger is less than ledger seq when amount > 0") + } + + let key = DataKey::Allowance(AllowanceDataKey { from, spender }); + e.storage().temporary().set(&key.clone(), &allowance); + + if amount > 0 { + let live_for = expiration_ledger + .checked_sub(e.ledger().sequence()) + .unwrap(); + + 
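+        // Extend the temporary entry's TTL so the allowance stays live until
+        // roughly `expiration_ledger`; `read_allowance` treats anything past that
+        // ledger as expired anyway.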
e.storage().temporary().extend_ttl(&key, live_for, live_for) + } +} + +pub fn spend_allowance(e: &Env, from: Address, spender: Address, amount: i128) { + let allowance = read_allowance(e, from.clone(), spender.clone()); + if allowance.amount < amount { + panic!("insufficient allowance"); + } + write_allowance( + e, + from, + spender, + allowance.amount - amount, + allowance.expiration_ledger, + ); +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/balance.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/balance.rs new file mode 100644 index 00000000..76134e8d --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/balance.rs @@ -0,0 +1,35 @@ +use crate::storage_types::{DataKey, BALANCE_BUMP_AMOUNT, BALANCE_LIFETIME_THRESHOLD}; +use soroban_sdk::{Address, Env}; + +pub fn read_balance(e: &Env, addr: Address) -> i128 { + let key = DataKey::Balance(addr); + if let Some(balance) = e.storage().persistent().get::(&key) { + e.storage() + .persistent() + .extend_ttl(&key, BALANCE_LIFETIME_THRESHOLD, BALANCE_BUMP_AMOUNT); + balance + } else { + 0 + } +} + +fn write_balance(e: &Env, addr: Address, amount: i128) { + let key = DataKey::Balance(addr); + e.storage().persistent().set(&key, &amount); + e.storage() + .persistent() + .extend_ttl(&key, BALANCE_LIFETIME_THRESHOLD, BALANCE_BUMP_AMOUNT); +} + +pub fn receive_balance(e: &Env, addr: Address, amount: i128) { + let balance = read_balance(e, addr.clone()); + write_balance(e, addr, balance + amount); +} + +pub fn spend_balance(e: &Env, addr: Address, amount: i128) { + let balance = read_balance(e, addr.clone()); + if balance < amount { + panic!("insufficient balance"); + } + write_balance(e, addr, balance - amount); +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/contract.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/contract.rs new file mode 100644 index 00000000..cc5690c6 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/contract.rs @@ -0,0 +1,167 @@ +//! This contract demonstrates a sample implementation of the Soroban token +//! interface. 
+use crate::admin::{has_administrator, read_administrator, write_administrator}; +use crate::allowance::{read_allowance, spend_allowance, write_allowance}; +use crate::balance::{read_balance, receive_balance, spend_balance}; +use crate::metadata::{read_decimal, read_name, read_symbol, write_metadata}; +use crate::storage_types::{INSTANCE_BUMP_AMOUNT, INSTANCE_LIFETIME_THRESHOLD}; +use soroban_sdk::token::{self, Interface as _}; +use soroban_sdk::{contract, contractimpl, Address, Env, String}; +use soroban_token_sdk::metadata::TokenMetadata; +use soroban_token_sdk::TokenUtils; + +fn check_nonnegative_amount(amount: i128) { + if amount < 0 { + panic!("negative amount is not allowed: {}", amount) + } +} + +#[contract] +pub struct Token; + +#[contractimpl] +impl Token { + pub fn initialize(e: Env, admin: Address, decimal: u32, name: String, symbol: String) { + if has_administrator(&e) { + panic!("already initialized") + } + write_administrator(&e, &admin); + if decimal > u8::MAX.into() { + panic!("Decimal must fit in a u8"); + } + + write_metadata( + &e, + TokenMetadata { + decimal, + name, + symbol, + }, + ) + } + + pub fn mint(e: Env, to: Address, amount: i128) { + check_nonnegative_amount(amount); + let admin = read_administrator(&e); + admin.require_auth(); + + e.storage() + .instance() + .extend_ttl(INSTANCE_LIFETIME_THRESHOLD, INSTANCE_BUMP_AMOUNT); + + receive_balance(&e, to.clone(), amount); + TokenUtils::new(&e).events().mint(admin, to, amount); + } + + pub fn set_admin(e: Env, new_admin: Address) { + let admin = read_administrator(&e); + admin.require_auth(); + + e.storage() + .instance() + .extend_ttl(INSTANCE_LIFETIME_THRESHOLD, INSTANCE_BUMP_AMOUNT); + + write_administrator(&e, &new_admin); + TokenUtils::new(&e).events().set_admin(admin, new_admin); + } +} + +#[contractimpl] +impl token::Interface for Token { + fn allowance(e: Env, from: Address, spender: Address) -> i128 { + e.storage() + .instance() + .extend_ttl(INSTANCE_LIFETIME_THRESHOLD, INSTANCE_BUMP_AMOUNT); + read_allowance(&e, from, spender).amount + } + + fn approve(e: Env, from: Address, spender: Address, amount: i128, expiration_ledger: u32) { + from.require_auth(); + + check_nonnegative_amount(amount); + + e.storage() + .instance() + .extend_ttl(INSTANCE_LIFETIME_THRESHOLD, INSTANCE_BUMP_AMOUNT); + + write_allowance(&e, from.clone(), spender.clone(), amount, expiration_ledger); + TokenUtils::new(&e) + .events() + .approve(from, spender, amount, expiration_ledger); + } + + fn balance(e: Env, id: Address) -> i128 { + e.storage() + .instance() + .extend_ttl(INSTANCE_LIFETIME_THRESHOLD, INSTANCE_BUMP_AMOUNT); + read_balance(&e, id) + } + + fn transfer(e: Env, from: Address, to: Address, amount: i128) { + from.require_auth(); + + check_nonnegative_amount(amount); + + e.storage() + .instance() + .extend_ttl(INSTANCE_LIFETIME_THRESHOLD, INSTANCE_BUMP_AMOUNT); + + spend_balance(&e, from.clone(), amount); + receive_balance(&e, to.clone(), amount); + TokenUtils::new(&e).events().transfer(from, to, amount); + } + + fn transfer_from(e: Env, spender: Address, from: Address, to: Address, amount: i128) { + spender.require_auth(); + + check_nonnegative_amount(amount); + + e.storage() + .instance() + .extend_ttl(INSTANCE_LIFETIME_THRESHOLD, INSTANCE_BUMP_AMOUNT); + + spend_allowance(&e, from.clone(), spender, amount); + spend_balance(&e, from.clone(), amount); + receive_balance(&e, to.clone(), amount); + TokenUtils::new(&e).events().transfer(from, to, amount) + } + + fn burn(e: Env, from: Address, amount: i128) { + 
from.require_auth(); + + check_nonnegative_amount(amount); + + e.storage() + .instance() + .extend_ttl(INSTANCE_LIFETIME_THRESHOLD, INSTANCE_BUMP_AMOUNT); + + spend_balance(&e, from.clone(), amount); + TokenUtils::new(&e).events().burn(from, amount); + } + + fn burn_from(e: Env, spender: Address, from: Address, amount: i128) { + spender.require_auth(); + + check_nonnegative_amount(amount); + + e.storage() + .instance() + .extend_ttl(INSTANCE_LIFETIME_THRESHOLD, INSTANCE_BUMP_AMOUNT); + + spend_allowance(&e, from.clone(), spender, amount); + spend_balance(&e, from.clone(), amount); + TokenUtils::new(&e).events().burn(from, amount) + } + + fn decimals(e: Env) -> u32 { + read_decimal(&e) + } + + fn name(e: Env) -> String { + read_name(&e) + } + + fn symbol(e: Env) -> String { + read_symbol(&e) + } +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/lib.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/lib.rs new file mode 100644 index 00000000..b5f04e4d --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/lib.rs @@ -0,0 +1,11 @@ +#![no_std] + +mod admin; +mod allowance; +mod balance; +mod contract; +mod metadata; +mod storage_types; +mod test; + +pub use crate::contract::TokenClient; diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/metadata.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/metadata.rs new file mode 100644 index 00000000..715feeea --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/metadata.rs @@ -0,0 +1,22 @@ +use soroban_sdk::{Env, String}; +use soroban_token_sdk::{metadata::TokenMetadata, TokenUtils}; + +pub fn read_decimal(e: &Env) -> u32 { + let util = TokenUtils::new(e); + util.metadata().get_metadata().decimal +} + +pub fn read_name(e: &Env) -> String { + let util = TokenUtils::new(e); + util.metadata().get_metadata().name +} + +pub fn read_symbol(e: &Env) -> String { + let util = TokenUtils::new(e); + util.metadata().get_metadata().symbol +} + +pub fn write_metadata(e: &Env, metadata: TokenMetadata) { + let util = TokenUtils::new(e); + util.metadata().set_metadata(&metadata); +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/storage_types.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/storage_types.rs new file mode 100644 index 00000000..5710c057 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/storage_types.rs @@ -0,0 +1,31 @@ +use soroban_sdk::{contracttype, Address}; + +pub(crate) const DAY_IN_LEDGERS: u32 = 17280; +pub(crate) const INSTANCE_BUMP_AMOUNT: u32 = 7 * DAY_IN_LEDGERS; +pub(crate) const INSTANCE_LIFETIME_THRESHOLD: u32 = INSTANCE_BUMP_AMOUNT - DAY_IN_LEDGERS; + +pub(crate) const BALANCE_BUMP_AMOUNT: u32 = 30 * DAY_IN_LEDGERS; +pub(crate) const BALANCE_LIFETIME_THRESHOLD: u32 = BALANCE_BUMP_AMOUNT - DAY_IN_LEDGERS; + +#[derive(Clone)] +#[contracttype] +pub struct AllowanceDataKey { + pub from: Address, + pub spender: Address, +} + +#[contracttype] +pub struct AllowanceValue { + pub amount: i128, + pub expiration_ledger: u32, +} + +#[derive(Clone)] +#[contracttype] +pub enum DataKey { + Allowance(AllowanceDataKey), + Balance(Address), + Nonce(Address), + State(Address), + Admin, +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/test.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/test.rs new file mode 100644 index 00000000..dbdb17a3 --- /dev/null +++ 
b/cmd/crates/soroban-test/tests/fixtures/test-wasms/token/src/test.rs @@ -0,0 +1,256 @@ +#![cfg(test)] +extern crate std; + +use crate::{contract::Token, TokenClient}; +use soroban_sdk::{ + symbol_short, + testutils::{Address as _, AuthorizedFunction, AuthorizedInvocation}, + Address, Env, IntoVal, Symbol, +}; + +fn create_token<'a>(e: &Env, admin: &Address) -> TokenClient<'a> { + let token = TokenClient::new(e, &e.register_contract(None, Token {})); + token.initialize(admin, &7, &"name".into_val(e), &"symbol".into_val(e)); + token +} + +#[test] +fn test() { + let e = Env::default(); + e.mock_all_auths(); + + let admin1 = Address::generate(&e); + let admin2 = Address::generate(&e); + let user1 = Address::generate(&e); + let user2 = Address::generate(&e); + let user3 = Address::generate(&e); + let token = create_token(&e, &admin1); + + token.mint(&user1, &1000); + assert_eq!( + e.auths(), + std::vec![( + admin1.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token.address.clone(), + symbol_short!("mint"), + (&user1, 1000_i128).into_val(&e), + )), + sub_invocations: std::vec![] + } + )] + ); + assert_eq!(token.balance(&user1), 1000); + + token.approve(&user2, &user3, &500, &200); + assert_eq!( + e.auths(), + std::vec![( + user2.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token.address.clone(), + symbol_short!("approve"), + (&user2, &user3, 500_i128, 200_u32).into_val(&e), + )), + sub_invocations: std::vec![] + } + )] + ); + assert_eq!(token.allowance(&user2, &user3), 500); + + token.transfer(&user1, &user2, &600); + assert_eq!( + e.auths(), + std::vec![( + user1.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token.address.clone(), + symbol_short!("transfer"), + (&user1, &user2, 600_i128).into_val(&e), + )), + sub_invocations: std::vec![] + } + )] + ); + assert_eq!(token.balance(&user1), 400); + assert_eq!(token.balance(&user2), 600); + + token.transfer_from(&user3, &user2, &user1, &400); + assert_eq!( + e.auths(), + std::vec![( + user3.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token.address.clone(), + Symbol::new(&e, "transfer_from"), + (&user3, &user2, &user1, 400_i128).into_val(&e), + )), + sub_invocations: std::vec![] + } + )] + ); + assert_eq!(token.balance(&user1), 800); + assert_eq!(token.balance(&user2), 200); + + token.transfer(&user1, &user3, &300); + assert_eq!(token.balance(&user1), 500); + assert_eq!(token.balance(&user3), 300); + + token.set_admin(&admin2); + assert_eq!( + e.auths(), + std::vec![( + admin1.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token.address.clone(), + symbol_short!("set_admin"), + (&admin2,).into_val(&e), + )), + sub_invocations: std::vec![] + } + )] + ); + + // Increase to 500 + token.approve(&user2, &user3, &500, &200); + assert_eq!(token.allowance(&user2, &user3), 500); + token.approve(&user2, &user3, &0, &200); + assert_eq!( + e.auths(), + std::vec![( + user2.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token.address.clone(), + symbol_short!("approve"), + (&user2, &user3, 0_i128, 200_u32).into_val(&e), + )), + sub_invocations: std::vec![] + } + )] + ); + assert_eq!(token.allowance(&user2, &user3), 0); +} + +#[test] +fn test_burn() { + let e = Env::default(); + e.mock_all_auths(); + + let admin = Address::generate(&e); + let user1 = Address::generate(&e); + let user2 = Address::generate(&e); + let token = create_token(&e, &admin); + + token.mint(&user1, 
&1000); + assert_eq!(token.balance(&user1), 1000); + + token.approve(&user1, &user2, &500, &200); + assert_eq!(token.allowance(&user1, &user2), 500); + + token.burn_from(&user2, &user1, &500); + assert_eq!( + e.auths(), + std::vec![( + user2.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token.address.clone(), + symbol_short!("burn_from"), + (&user2, &user1, 500_i128).into_val(&e), + )), + sub_invocations: std::vec![] + } + )] + ); + + assert_eq!(token.allowance(&user1, &user2), 0); + assert_eq!(token.balance(&user1), 500); + assert_eq!(token.balance(&user2), 0); + + token.burn(&user1, &500); + assert_eq!( + e.auths(), + std::vec![( + user1.clone(), + AuthorizedInvocation { + function: AuthorizedFunction::Contract(( + token.address.clone(), + symbol_short!("burn"), + (&user1, 500_i128).into_val(&e), + )), + sub_invocations: std::vec![] + } + )] + ); + + assert_eq!(token.balance(&user1), 0); + assert_eq!(token.balance(&user2), 0); +} + +#[test] +#[should_panic(expected = "insufficient balance")] +fn transfer_insufficient_balance() { + let e = Env::default(); + e.mock_all_auths(); + + let admin = Address::generate(&e); + let user1 = Address::generate(&e); + let user2 = Address::generate(&e); + let token = create_token(&e, &admin); + + token.mint(&user1, &1000); + assert_eq!(token.balance(&user1), 1000); + + token.transfer(&user1, &user2, &1001); +} + +#[test] +#[should_panic(expected = "insufficient allowance")] +fn transfer_from_insufficient_allowance() { + let e = Env::default(); + e.mock_all_auths(); + + let admin = Address::generate(&e); + let user1 = Address::generate(&e); + let user2 = Address::generate(&e); + let user3 = Address::generate(&e); + let token = create_token(&e, &admin); + + token.mint(&user1, &1000); + assert_eq!(token.balance(&user1), 1000); + + token.approve(&user1, &user3, &100, &200); + assert_eq!(token.allowance(&user1, &user3), 100); + + token.transfer_from(&user3, &user1, &user2, &101); +} + +#[test] +#[should_panic(expected = "already initialized")] +fn initialize_already_initialized() { + let e = Env::default(); + let admin = Address::generate(&e); + let token = create_token(&e, &admin); + + token.initialize(&admin, &10, &"name".into_val(&e), &"symbol".into_val(&e)); +} + +#[test] +#[should_panic(expected = "Decimal must fit in a u8")] +fn decimal_is_over_max() { + let e = Env::default(); + let admin = Address::generate(&e); + let token = TokenClient::new(&e, &e.register_contract(None, Token {})); + token.initialize( + &admin, + &(u32::from(u8::MAX) + 1), + &"name".into_val(&e), + &"symbol".into_val(&e), + ); +} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/udt/Cargo.toml b/cmd/crates/soroban-test/tests/fixtures/test-wasms/udt/Cargo.toml new file mode 100644 index 00000000..20628480 --- /dev/null +++ b/cmd/crates/soroban-test/tests/fixtures/test-wasms/udt/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "test_udt" +version.workspace = true +authors = ["Stellar Development Foundation "] +license = "Apache-2.0" +edition = "2021" +publish = false +rust-version.workspace = true + +[lib] +crate-type = ["cdylib"] +doctest = false + +[dependencies] +soroban-sdk = { workspace = true } + +[dev-dependencies] +soroban-sdk = { workspace = true , features = ["testutils"]} diff --git a/cmd/crates/soroban-test/tests/fixtures/test-wasms/udt/src/lib.rs b/cmd/crates/soroban-test/tests/fixtures/test-wasms/udt/src/lib.rs new file mode 100644 index 00000000..695d8a7a --- /dev/null +++ 
b/cmd/crates/soroban-test/tests/fixtures/test-wasms/udt/src/lib.rs @@ -0,0 +1,112 @@ +#![no_std] +use soroban_sdk::{contract, contractimpl, contracttype, Vec}; + +#[contracttype] +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub enum UdtEnum2 { + A = 10, + B = 15, +} + +#[contracttype] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum UdtEnum { + UdtA, + UdtB(UdtStruct), + UdtC(UdtEnum2), + UdtD(UdtTuple), +} + +#[contracttype] +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct UdtTuple(pub i64, pub Vec); + +#[contracttype] +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct UdtStruct { + a: i64, + b: i64, + pub c: Vec, +} + +#[contract] +pub struct Contract; + +#[contractimpl] +impl Contract { + #[allow(clippy::unnecessary_fold)] + pub fn add(a: UdtEnum, b: UdtEnum) -> i64 { + let a = match a { + UdtEnum::UdtA => 0, + UdtEnum::UdtB(udt) => udt.a + udt.b, + UdtEnum::UdtC(val) => val as i64, + UdtEnum::UdtD(tup) => tup.0 + tup.1.iter().fold(0i64, |sum, i| sum + i), + }; + let b = match b { + UdtEnum::UdtA => 0, + UdtEnum::UdtB(udt) => udt.a + udt.b, + UdtEnum::UdtC(val) => val as i64, + UdtEnum::UdtD(tup) => tup.0 + tup.1.iter().sum::(), + }; + a + b + } +} + +#[cfg(test)] +mod test { + use super::*; + use soroban_sdk::{vec, xdr::ScVal, Bytes, Env, TryFromVal}; + + #[test] + fn test_serializing() { + use soroban_sdk::xdr::ToXdr; + let e = Env::default(); + let udt = UdtStruct { + a: 10, + b: 12, + c: vec![&e, 1], + }; + let bin = udt.to_xdr(&e); + let expected_bytes = [ + 0u8, 0, 0, 17, 0, 0, 0, 1, 0, 0, 0, 3, 0, 0, 0, 15, 0, 0, 0, 1, 97, 0, 0, 0, 0, 0, 0, + 6, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 15, 0, 0, 0, 1, 98, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, + 0, 0, 0, 0, 12, 0, 0, 0, 15, 0, 0, 0, 1, 99, 0, 0, 0, 0, 0, 0, 16, 0, 0, 0, 1, 0, 0, 0, + 1, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 1, + ]; + let expected_bytes = Bytes::from_array(&e, &expected_bytes); + assert_eq!(bin, expected_bytes); + } + + #[test] + fn test_add() { + let e = Env::default(); + let contract_id = e.register_contract(None, Contract); + let client = ContractClient::new(&e, &contract_id); + + let udt = UdtStruct { + a: 10, + b: 12, + c: vec![&e, 1], + }; + let z = client.add(&UdtEnum::UdtA, &UdtEnum::UdtB(udt)); + assert_eq!(z, 22); + + let udt1 = UdtEnum2::A; + let udt2 = UdtTuple(1, vec![&e, 2, 3]); + let z = client.add(&UdtEnum::UdtC(udt1), &UdtEnum::UdtD(udt2)); + assert_eq!(z, 16); + } + + #[test] + fn test_scval_accessibility_from_udt_types() { + let e = Env::default(); + let udt = UdtStruct { + a: 10, + b: 12, + c: vec![&e, 1], + }; + let val: ScVal = udt.clone().try_into().unwrap(); + let roundtrip = UdtStruct::try_from_val(&e, &val).unwrap(); + assert_eq!(udt, roundtrip); + } +} diff --git a/cmd/crates/soroban-test/tests/it/arg_parsing.rs b/cmd/crates/soroban-test/tests/it/arg_parsing.rs new file mode 100644 index 00000000..c245fd8c --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/arg_parsing.rs @@ -0,0 +1,215 @@ +use crate::util::CUSTOM_TYPES; +use serde_json::json; +use soroban_env_host::xdr::{ + ScBytes, ScSpecTypeBytesN, ScSpecTypeDef, ScSpecTypeOption, ScSpecTypeUdt, ScVal, +}; +use soroban_spec_tools::{from_string_primitive, Spec}; + +#[test] +fn parse_bool() { + println!( + "{:#?}", + from_string_primitive("true", &ScSpecTypeDef::Bool,).unwrap() + ); +} + +#[test] +fn parse_null() { + let parsed = from_string_primitive( + "null", + &ScSpecTypeDef::Option(Box::new(ScSpecTypeOption { + value_type: Box::new(ScSpecTypeDef::Bool), + })), + ) + .unwrap(); + println!("{parsed:#?}"); + assert!(parsed == ScVal::Void); +} + 
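The arg_parsing tests above and below all exercise the same conversion path: soroban_spec_tools::from_string_primitive takes the raw CLI argument string together with the ScSpecTypeDef taken from the contract spec and returns the matching ScVal. A minimal sketch of that pattern, using only items already imported in this test file (the parse_arg wrapper itself is hypothetical, not part of the diff):

use soroban_env_host::xdr::{ScSpecTypeDef, ScVal};
use soroban_spec_tools::from_string_primitive;

// Convert one CLI argument string into the XDR value the contract expects,
// given the argument's type from the contract spec.
fn parse_arg(raw: &str, ty: &ScSpecTypeDef) -> ScVal {
    from_string_primitive(raw, ty).unwrap()
}

// e.g. parse_arg("true", &ScSpecTypeDef::Bool) yields ScVal::Bool(true), and
// parse_arg("null", &ScSpecTypeDef::Option(..)) yields ScVal::Void, as the
// surrounding tests assert.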
+#[test] +fn parse_u32() { + let u32_ = 42u32; + let res = &format!("{u32_}"); + println!( + "{:#?}", + from_string_primitive(res, &ScSpecTypeDef::U32,).unwrap() + ); +} + +#[test] +fn parse_i32() { + let i32_ = -42_i32; + let res = &format!("{i32_}"); + println!( + "{:#?}", + from_string_primitive(res, &ScSpecTypeDef::I32,).unwrap() + ); +} + +#[test] +fn parse_u64() { + let b = 42_000_000_000u64; + let res = &format!("{b}"); + println!( + "{:#?}", + from_string_primitive(res, &ScSpecTypeDef::U64,).unwrap() + ); +} + +#[test] +fn parse_u128() { + let b = 340_000_000_000_000_000_000_000_000_000_000_000_000u128; + let res = &format!("{b}"); + println!( + "{:#?}", + from_string_primitive(res, &ScSpecTypeDef::U128,).unwrap() + ); +} + +#[test] +fn parse_i128() { + let b = -170_000_000_000_000_000_000_000_000_000_000_000_000i128; + let res = &format!("{b}"); + println!( + "{:#?}", + from_string_primitive(res, &ScSpecTypeDef::I128,).unwrap() + ); +} + +#[test] +fn parse_i256() { + let b = -170_000_000_000_000_000_000_000_000_000_000_000_000i128; + let res = &format!("{b}"); + let entries = get_spec(); + entries.from_string(res, &ScSpecTypeDef::I256).unwrap(); + println!( + "{:#?}", + from_string_primitive(res, &ScSpecTypeDef::I256,).unwrap() + ); +} + +#[test] +fn parse_bytes() { + let b = from_string_primitive(r"beefface", &ScSpecTypeDef::Bytes).unwrap(); + assert_eq!( + b, + ScVal::Bytes(ScBytes(vec![0xbe, 0xef, 0xfa, 0xce].try_into().unwrap())) + ); + println!("{b:#?}"); +} + +#[test] +fn parse_bytes_when_hex_is_all_numbers() { + let b = from_string_primitive(r"4554", &ScSpecTypeDef::Bytes).unwrap(); + assert_eq!( + b, + ScVal::Bytes(ScBytes(vec![0x45, 0x54].try_into().unwrap())) + ); + println!("{b:#?}"); +} + +#[test] +fn parse_bytesn() { + let b = from_string_primitive( + r"beefface", + &ScSpecTypeDef::BytesN(ScSpecTypeBytesN { n: 4 }), + ) + .unwrap(); + assert_eq!( + b, + ScVal::Bytes(ScBytes(vec![0xbe, 0xef, 0xfa, 0xce].try_into().unwrap())) + ); + println!("{b:#?}"); +} + +#[test] +fn parse_bytesn_when_hex_is_all_numbers() { + let b = + from_string_primitive(r"4554", &ScSpecTypeDef::BytesN(ScSpecTypeBytesN { n: 2 })).unwrap(); + assert_eq!( + b, + ScVal::Bytes(ScBytes(vec![0x45, 0x54].try_into().unwrap())) + ); + println!("{b:#?}",); +} + +#[test] +fn parse_symbol() { + // let b = "hello"; + // let res = &parse_json(&HashMap::new(), &ScSpecTypeDef::Symbol, &json! {b}).unwrap(); + // println!("{res}"); + println!( + "{:#?}", + from_string_primitive(r#""hello""#, &ScSpecTypeDef::Symbol).unwrap() + ); +} + +#[test] +fn parse_symbol_with_no_quotation_marks() { + // let b = "hello"; + // let res = &parse_json(&HashMap::new(), &ScSpecTypeDef::Symbol, &json! 
{b}).unwrap(); + // println!("{res}"); + println!( + "{:#?}", + from_string_primitive("hello", &ScSpecTypeDef::Symbol).unwrap() + ); +} + +#[test] +fn parse_optional_symbol_with_no_quotation_marks() { + let parsed = from_string_primitive( + "hello", + &ScSpecTypeDef::Option(Box::new(ScSpecTypeOption { + value_type: Box::new(ScSpecTypeDef::Symbol), + })), + ) + .unwrap(); + println!("{parsed:#?}"); + assert!(parsed == ScVal::Symbol("hello".try_into().unwrap())); +} + +#[test] +fn parse_optional_bool_with_no_quotation_marks() { + let parsed = from_string_primitive( + "true", + &ScSpecTypeDef::Option(Box::new(ScSpecTypeOption { + value_type: Box::new(ScSpecTypeDef::Bool), + })), + ) + .unwrap(); + println!("{parsed:#?}"); + assert!(parsed == ScVal::Bool(true)); +} + +#[test] +fn parse_obj() { + let type_ = &ScSpecTypeDef::Udt(ScSpecTypeUdt { + name: "Test".parse().unwrap(), + }); + let entries = get_spec(); + let val = &json!({"a": 42, "b": false, "c": "world"}); + println!("{:#?}", entries.from_json(val, type_)); +} + +#[test] +fn parse_enum() { + let entries = get_spec(); + let func = entries.find_function("simple").unwrap(); + println!("{func:#?}"); + let type_ = &func.inputs.as_slice()[0].type_; + println!("{:#?}", entries.from_json(&json!("First"), type_)); +} + +#[test] +fn parse_enum_const() { + let entries = get_spec(); + let func = entries.find_function("card").unwrap(); + println!("{func:#?}"); + let type_ = &func.inputs.as_slice()[0].type_; + println!("{:#?}", entries.from_json(&json!(11), type_)); +} + +fn get_spec() -> Spec { + let res = soroban_spec::read::from_wasm(&CUSTOM_TYPES.bytes()).unwrap(); + Spec(Some(res)) +} diff --git a/cmd/crates/soroban-test/tests/it/config.rs b/cmd/crates/soroban-test/tests/it/config.rs new file mode 100644 index 00000000..5912b2cf --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/config.rs @@ -0,0 +1,223 @@ +use assert_fs::TempDir; +use soroban_test::TestEnv; +use std::{fs, path::Path}; + +use crate::util::{add_key, add_test_id, SecretKind, DEFAULT_SEED_PHRASE}; +use soroban_cli::commands::network; + +const NETWORK_PASSPHRASE: &str = "Local Sandbox Stellar Network ; September 2022"; + +#[test] +fn set_and_remove_network() { + TestEnv::with_default(|sandbox| { + add_network(sandbox, "local"); + let dir = sandbox.dir().join(".soroban").join("network"); + let read_dir = std::fs::read_dir(dir); + println!("{read_dir:#?}"); + let file = read_dir.unwrap().next().unwrap().unwrap(); + assert_eq!(file.file_name().to_str().unwrap(), "local.toml"); + + let res = sandbox.cmd::(""); + let res = res.ls().unwrap(); + assert_eq!(res.len(), 1); + assert_eq!(&res[0], "local"); + + sandbox.cmd::("local").run().unwrap(); + + // sandbox + // .new_assert_cmd("config") + // .arg("network") + // .arg("rm") + // .arg("local") + // .assert() + // .stdout(""); + sandbox + .new_assert_cmd("network") + .arg("ls") + .assert() + .stdout("\n"); + }); +} + +fn add_network(sandbox: &TestEnv, name: &str) { + sandbox + .new_assert_cmd("network") + .arg("add") + .args([ + "--rpc-url=https://127.0.0.1", + "--network-passphrase", + NETWORK_PASSPHRASE, + name, + ]) + .assert() + .success() + .stderr("") + .stdout(""); +} + +fn add_network_global(sandbox: &TestEnv, dir: &Path, name: &str) { + sandbox + .new_assert_cmd("network") + .env("XDG_CONFIG_HOME", dir.to_str().unwrap()) + .arg("add") + .arg("--global") + .arg("--rpc-url") + .arg("https://127.0.0.1") + .arg("--network-passphrase") + .arg("Local Sandbox Stellar Network ; September 2022") + .arg(name) + .assert() + .success(); +} 
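The two helpers above differ only in where the configuration lands: add_network writes into the project-local store, which the set_and_remove_network test observes as .soroban/network/<name>.toml inside the sandbox directory, while add_network_global passes --global and points XDG_CONFIG_HOME at a temporary directory so the per-user store is used instead. A small sketch of the project-local lookup that the first test performs by hand (the helper name is hypothetical; the .soroban/network layout is the one asserted in that test):

use std::path::{Path, PathBuf};

// Resolve the file that `network add <name>` is expected to create inside a
// project directory; mirrors the read_dir check in set_and_remove_network.
fn local_network_file(project_dir: &Path, name: &str) -> PathBuf {
    project_dir
        .join(".soroban")
        .join("network")
        .join(format!("{name}.toml"))
}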
+ +#[test] +fn set_and_remove_global_network() { + let sandbox = TestEnv::default(); + let dir = TempDir::new().unwrap(); + + add_network_global(&sandbox, &dir, "global"); + + sandbox + .new_assert_cmd("network") + .env("XDG_CONFIG_HOME", dir.to_str().unwrap()) + .arg("ls") + .arg("--global") + .assert() + .stdout("global\n"); + + sandbox + .new_assert_cmd("network") + .env("XDG_CONFIG_HOME", dir.to_str().unwrap()) + .arg("rm") + .arg("--global") + .arg("global") + .assert() + .stdout(""); + + sandbox + .new_assert_cmd("network") + .env("XDG_CONFIG_HOME", dir.to_str().unwrap()) + .arg("ls") + .assert() + .stdout("\n"); +} + +#[test] +fn multiple_networks() { + let sandbox = TestEnv::default(); + let ls = || -> Vec { sandbox.cmd::("").ls().unwrap() }; + + add_network(&sandbox, "local"); + println!("{:#?}", ls()); + add_network(&sandbox, "local2"); + + assert_eq!(ls().as_slice(), ["local".to_owned(), "local2".to_owned()]); + + sandbox.cmd::("local").run().unwrap(); + + assert_eq!(ls().as_slice(), ["local2".to_owned()]); + + let sub_dir = sandbox.dir().join("sub_directory"); + fs::create_dir(&sub_dir).unwrap(); + + TestEnv::cmd_arr_with_pwd::( + &[ + "--rpc-url", + "https://127.0.0.1", + "--network-passphrase", + "Local Sandbox Stellar Network ; September 2022", + "local3", + ], + &sub_dir, + ) + .run() + .unwrap(); + + assert_eq!(ls().as_slice(), ["local2".to_owned(), "local3".to_owned()]); +} + +#[test] +fn read_key() { + let sandbox = TestEnv::default(); + let dir = sandbox.dir().as_ref(); + add_test_id(dir); + let ident_dir = dir.join(".soroban/identity"); + assert!(ident_dir.exists()); + sandbox + .new_assert_cmd("keys") + .arg("ls") + .assert() + .stdout(predicates::str::contains("test_id\n")); +} + +#[test] +fn generate_key() { + let sandbox = TestEnv::default(); + sandbox + .new_assert_cmd("keys") + .arg("generate") + .arg("--network=futurenet") + .arg("--no-fund") + .arg("--seed") + .arg("0000000000000000") + .arg("test_2") + .assert() + .stdout("") + .success(); + + sandbox + .new_assert_cmd("keys") + .arg("ls") + .assert() + .stdout(predicates::str::contains("test_2\n")); + let file_contents = + fs::read_to_string(sandbox.dir().join(".soroban/identity/test_2.toml")).unwrap(); + assert_eq!( + file_contents, + format!("seed_phrase = \"{DEFAULT_SEED_PHRASE}\"\n") + ); +} + +#[test] +fn seed_phrase() { + let sandbox = TestEnv::default(); + let dir = sandbox.dir(); + add_key( + dir, + "test_seed", + SecretKind::Seed, + "one two three four five six seven eight nine ten eleven twelve", + ); + + sandbox + .new_assert_cmd("keys") + .current_dir(dir) + .arg("ls") + .assert() + .stdout(predicates::str::contains("test_seed\n")); +} + +#[test] +fn use_env() { + let sandbox = TestEnv::default(); + + sandbox + .new_assert_cmd("keys") + .env( + "SOROBAN_SECRET_KEY", + "SDIY6AQQ75WMD4W46EYB7O6UYMHOCGQHLAQGQTKHDX4J2DYQCHVCQYFD", + ) + .arg("add") + .arg("bob") + .assert() + .stdout("") + .success(); + + sandbox + .new_assert_cmd("keys") + .arg("show") + .arg("bob") + .assert() + .success() + .stdout("SDIY6AQQ75WMD4W46EYB7O6UYMHOCGQHLAQGQTKHDX4J2DYQCHVCQYFD\n"); +} diff --git a/cmd/crates/soroban-test/tests/it/hello_world.rs b/cmd/crates/soroban-test/tests/it/hello_world.rs new file mode 100644 index 00000000..4c45403a --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/hello_world.rs @@ -0,0 +1,22 @@ +use soroban_cli::commands::contract::{self, fetch}; +use soroban_test::TestEnv; +use std::path::PathBuf; + +use crate::util::{ + add_test_seed, is_rpc, network_passphrase, network_passphrase_arg, 
rpc_url, rpc_url_arg, + DEFAULT_PUB_KEY, DEFAULT_PUB_KEY_1, DEFAULT_SECRET_KEY, DEFAULT_SEED_PHRASE, HELLO_WORLD, + TEST_SALT, +}; + +#[tokio::test] +async fn fetch() { + if !is_rpc() { + return; + } + let e = TestEnv::default(); + let f = e.dir().join("contract.wasm"); + let id = deploy_hello(&e); + let cmd = e.cmd_arr::(&["--id", &id, "--out-file", f.to_str().unwrap()]); + cmd.run().await.unwrap(); + assert!(f.exists()); +} diff --git a/cmd/crates/soroban-test/tests/it/help.rs b/cmd/crates/soroban-test/tests/it/help.rs new file mode 100644 index 00000000..6d4680e7 --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/help.rs @@ -0,0 +1,97 @@ +use soroban_cli::commands::contract; +use soroban_test::TestEnv; + +use crate::util::{invoke_custom as invoke, CUSTOM_TYPES}; + +async fn invoke_custom(func: &str, args: &str) -> Result { + let e = &TestEnv::default(); + invoke(e, "1", func, args, &CUSTOM_TYPES.path()).await +} + +#[tokio::test] +async fn generate_help() { + assert!(invoke_custom("strukt_hel", "--help") + .await + .unwrap() + .contains("Example contract method which takes a struct")); +} + +#[tokio::test] +async fn vec_help() { + assert!(invoke_custom("vec", "--help") + .await + .unwrap() + .contains("Array")); +} + +#[tokio::test] +async fn tuple_help() { + assert!(invoke_custom("tuple", "--help") + .await + .unwrap() + .contains("Tuple")); +} + +#[tokio::test] +async fn strukt_help() { + let output = invoke_custom("strukt", "--help").await.unwrap(); + assert!(output.contains("--strukt '{ \"a\": 1, \"b\": true, \"c\": \"hello\" }'",)); + assert!(output.contains("This is from the rust doc above the struct Test",)); +} + +#[tokio::test] +async fn complex_enum_help() { + let output = invoke_custom("complex", "--help").await.unwrap(); + assert!(output.contains(r#"--complex '{"Struct":{ "a": 1, "b": true, "c": "hello" }}"#,)); + assert!(output.contains(r#"{"Tuple":[{ "a": 1, "b": true, "c": "hello" }"#,)); + assert!(output.contains(r#"{"Enum":"First"|"Second"|"Third"}"#,)); + assert!(output.contains( + r#"{"Asset":["GDIY6AQQ75WMD4W46EYB7O6UYMHOCGQHLAQGQTKHDX4J2DYQCHVCR4W4", "-100"]}"#, + )); + assert!(output.contains(r#""Void"'"#)); +} + +#[tokio::test] +async fn multi_arg_failure() { + assert!(matches!( + invoke_custom("multi_args", "--b").await.unwrap_err(), + contract::invoke::Error::MissingArgument(_) + )); +} + +#[tokio::test] +async fn handle_arg_larger_than_i32_failure() { + let res = invoke_custom("i32_", &format!("--i32_={}", u32::MAX)).await; + assert!(matches!( + res, + Err(contract::invoke::Error::CannotParseArg { .. }) + )); +} + +#[tokio::test] +async fn handle_arg_larger_than_i64_failure() { + let res = invoke_custom("i64_", &format!("--i64_={}", u64::MAX)).await; + assert!(matches!( + res, + Err(contract::invoke::Error::CannotParseArg { .. 
}) + )); +} + +#[test] +fn build() { + let sandbox = TestEnv::default(); + let cargo_dir = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let hello_world_contract_path = + cargo_dir.join("tests/fixtures/test-wasms/hello_world/Cargo.toml"); + sandbox + .new_assert_cmd("contract") + .arg("build") + .arg("--manifest-path") + .arg(hello_world_contract_path) + .arg("--profile") + .arg("test-wasms") + .arg("--package") + .arg("test_hello_world") + .assert() + .success(); +} diff --git a/cmd/crates/soroban-test/tests/it/integration.rs b/cmd/crates/soroban-test/tests/it/integration.rs new file mode 100644 index 00000000..4e92b931 --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/integration.rs @@ -0,0 +1,5 @@ +mod custom_types; +mod dotenv; +mod hello_world; +mod util; +mod wrap; diff --git a/cmd/crates/soroban-test/tests/it/integration/custom_types.rs b/cmd/crates/soroban-test/tests/it/integration/custom_types.rs new file mode 100644 index 00000000..fda2c1f6 --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/integration/custom_types.rs @@ -0,0 +1,419 @@ +use serde_json::json; + +use soroban_cli::commands; +use soroban_test::TestEnv; + +use crate::integration::util::{deploy_custom, extend_contract, CUSTOM_TYPES}; + +use super::util::invoke_with_roundtrip; + +fn invoke_custom(e: &TestEnv, id: &str, func: &str) -> assert_cmd::Command { + let mut s = e.new_assert_cmd("contract"); + s.arg("invoke").arg("--id").arg(id).arg("--").arg(func); + s +} + +#[tokio::test] +async fn parse() { + let sandbox = &TestEnv::default(); + let id = &deploy_custom(sandbox); + extend_contract(sandbox, id, CUSTOM_TYPES).await; + symbol(sandbox, id); + string_with_quotes(sandbox, id).await; + symbol_with_quotes(sandbox, id).await; + multi_arg_success(sandbox, id); + bytes_as_file(sandbox, id); + map(sandbox, id).await; + vec_(sandbox, id).await; + tuple(sandbox, id).await; + strukt(sandbox, id).await; + tuple_strukt(sandbox, id).await; + enum_2_str(sandbox, id).await; + e_2_s_enum(sandbox, id).await; + asset(sandbox, id).await; + e_2_s_tuple(sandbox, id).await; + e_2_s_strukt(sandbox, id).await; + number_arg(sandbox, id).await; + number_arg_return_err(sandbox, id).await; + i32(sandbox, id).await; + i64(sandbox, id).await; + negative_i32(sandbox, id).await; + negative_i64(sandbox, id).await; + account_address(sandbox, id).await; + contract_address(sandbox, id).await; + bytes(sandbox, id).await; + const_enum(sandbox, id).await; + number_arg_return_ok(sandbox, id); + void(sandbox, id); + val(sandbox, id); + parse_u128(sandbox, id); + parse_i128(sandbox, id); + parse_negative_i128(sandbox, id); + parse_u256(sandbox, id); + parse_i256(sandbox, id); + parse_negative_i256(sandbox, id); + boolean(sandbox, id); + boolean_two(sandbox, id); + boolean_no_flag(sandbox, id); + boolean_false(sandbox, id); + boolean_not(sandbox, id); + boolean_not_no_flag(sandbox, id); + option_none(sandbox, id); + option_some(sandbox, id); +} + +fn symbol(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "hello") + .arg("--hello") + .arg("world") + .assert() + .success() + .stdout( + r#""world" +"#, + ); +} + +async fn string_with_quotes(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "string", json!("hello world")).await; +} + +async fn symbol_with_quotes(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "hello", json!("world")).await; +} + +fn multi_arg_success(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "multi_args") + .arg("--a") + .arg("42") + .arg("--b") + .assert() + 
.success() + .stdout("42\n"); +} + +fn bytes_as_file(sandbox: &TestEnv, id: &str) { + let env = &TestEnv::default(); + let path = env.temp_dir.join("bytes.txt"); + std::fs::write(&path, 0x0073_7465_6c6c_6172u128.to_be_bytes()).unwrap(); + invoke_custom(sandbox, id, "bytes") + .arg("--bytes-file-path") + .arg(path) + .assert() + .success() + .stdout("\"0000000000000000007374656c6c6172\"\n"); +} + +async fn map(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "map", json!({"0": true, "1": false})).await; +} + +async fn vec_(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "vec", json!([0, 1])).await; +} + +async fn tuple(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "tuple", json!(["hello", 0])).await; +} + +async fn strukt(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip( + sandbox, + id, + "strukt", + json!({"a": 42, "b": true, "c": "world"}), + ) + .await; +} + +async fn tuple_strukt(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip( + sandbox, + id, + "tuple_strukt", + json!([{"a": 42, "b": true, "c": "world"}, "First"]), + ) + .await; +} + +async fn enum_2_str(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "simple", json!("First")).await; +} + +async fn e_2_s_enum(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "complex", json!({"Enum": "First"})).await; +} + +async fn asset(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip( + sandbox, + id, + "complex", + json!({"Asset": ["CB64D3G7SM2RTH6JSGG34DDTFTQ5CFDKVDZJZSODMCX4NJ2HV2KN7OHT", "100" ]}), + ) + .await; +} + +fn complex_tuple() -> serde_json::Value { + json!({"Tuple": [{"a": 42, "b": true, "c": "world"}, "First"]}) +} + +async fn e_2_s_tuple(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "complex", complex_tuple()).await; +} + +async fn e_2_s_strukt(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip( + sandbox, + id, + "complex", + json!({"Struct": {"a": 42, "b": true, "c": "world"}}), + ) + .await; +} + +async fn number_arg(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "u32_", 42).await; +} + +fn number_arg_return_ok(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "u32_fail_on_even") + .arg("--u32_") + .arg("1") + .assert() + .success() + .stdout("1\n"); +} + +async fn number_arg_return_err(sandbox: &TestEnv, id: &str) { + let res = sandbox + .invoke(&["--id", id, "--", "u32_fail_on_even", "--u32_=2"]) + .await + .unwrap_err(); + if let commands::contract::invoke::Error::ContractInvoke(name, doc) = &res { + assert_eq!(name, "NumberMustBeOdd"); + assert_eq!(doc, "Please provide an odd number"); + }; + println!("{res:#?}"); +} + +fn void(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "woid") + .assert() + .success() + .stdout("\n") + .stderr(""); +} + +fn val(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "val") + .assert() + .success() + .stdout("null\n") + .stderr(""); +} + +async fn i32(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "i32_", 42).await; +} + +async fn i64(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "i64_", i64::MAX).await; +} + +async fn negative_i32(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "i32_", -42).await; +} + +async fn negative_i64(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "i64_", i64::MIN).await; +} + +async fn account_address(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip( + sandbox, + id, + "addresse", + 
json!("GD5KD2KEZJIGTC63IGW6UMUSMVUVG5IHG64HUTFWCHVZH2N2IBOQN7PS"), + ) + .await; +} + +async fn contract_address(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip( + sandbox, + id, + "addresse", + json!("CA3D5KRYM6CB7OWQ6TWYRR3Z4T7GNZLKERYNZGGA5SOAOPIFY6YQGAXE"), + ) + .await; +} + +async fn bytes(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "bytes", json!("7374656c6c6172")).await; +} + +async fn const_enum(sandbox: &TestEnv, id: &str) { + invoke_with_roundtrip(sandbox, id, "card", "11").await; +} + +fn parse_u128(sandbox: &TestEnv, id: &str) { + let num = "340000000000000000000000000000000000000"; + invoke_custom(sandbox, id, "u128") + .arg("--u128") + .arg(num) + .assert() + .success() + .stdout(format!( + r#""{num}" +"#, + )); +} + +fn parse_i128(sandbox: &TestEnv, id: &str) { + let num = "170000000000000000000000000000000000000"; + invoke_custom(sandbox, id, "i128") + .arg("--i128") + .arg(num) + .assert() + .success() + .stdout(format!( + r#""{num}" +"#, + )); +} + +fn parse_negative_i128(sandbox: &TestEnv, id: &str) { + let num = "-170000000000000000000000000000000000000"; + invoke_custom(sandbox, id, "i128") + .arg("--i128") + .arg(num) + .assert() + .success() + .stdout(format!( + r#""{num}" +"#, + )); +} + +fn parse_u256(sandbox: &TestEnv, id: &str) { + let num = "340000000000000000000000000000000000000"; + invoke_custom(sandbox, id, "u256") + .arg("--u256") + .arg(num) + .assert() + .success() + .stdout(format!( + r#""{num}" +"#, + )); +} + +fn parse_i256(sandbox: &TestEnv, id: &str) { + let num = "170000000000000000000000000000000000000"; + invoke_custom(sandbox, id, "i256") + .arg("--i256") + .arg(num) + .assert() + .success() + .stdout(format!( + r#""{num}" +"#, + )); +} + +fn parse_negative_i256(sandbox: &TestEnv, id: &str) { + let num = "-170000000000000000000000000000000000000"; + invoke_custom(sandbox, id, "i256") + .arg("--i256") + .arg(num) + .assert() + .success() + .stdout(format!( + r#""{num}" +"#, + )); +} + +fn boolean(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "boolean") + .arg("--boolean") + .assert() + .success() + .stdout( + r"true +", + ); +} +fn boolean_two(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "boolean") + .arg("--boolean") + .arg("true") + .assert() + .success() + .stdout( + r"true +", + ); +} + +fn boolean_no_flag(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "boolean") + .assert() + .success() + .stdout( + r"false +", + ); +} + +fn boolean_false(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "boolean") + .arg("--boolean") + .arg("false") + .assert() + .success() + .stdout( + r"false +", + ); +} + +fn boolean_not(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "not") + .arg("--boolean") + .assert() + .success() + .stdout( + r"false +", + ); +} + +fn boolean_not_no_flag(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "not").assert().success().stdout( + r"true +", + ); +} + +fn option_none(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "option") + .assert() + .success() + .stdout( + r"null +", + ); +} + +fn option_some(sandbox: &TestEnv, id: &str) { + invoke_custom(sandbox, id, "option") + .arg("--option=1") + .assert() + .success() + .stdout( + r"1 +", + ); +} diff --git a/cmd/crates/soroban-test/tests/it/integration/dotenv.rs b/cmd/crates/soroban-test/tests/it/integration/dotenv.rs new file mode 100644 index 00000000..d7d56aaf --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/integration/dotenv.rs @@ -0,0 +1,64 @@ +use 
soroban_test::TestEnv; + +use super::util::{deploy_hello, TEST_CONTRACT_ID}; + +fn write_env_file(e: &TestEnv, contents: &str) { + let env_file = e.dir().join(".env"); + std::fs::write(&env_file, contents).unwrap(); + assert_eq!(contents, std::fs::read_to_string(env_file).unwrap()); +} + +fn contract_id() -> String { + format!("SOROBAN_CONTRACT_ID={TEST_CONTRACT_ID}") +} + +#[test] +fn can_read_file() { + TestEnv::with_default(|e| { + deploy_hello(e); + write_env_file(e, &contract_id()); + e.new_assert_cmd("contract") + .arg("invoke") + .arg("--") + .arg("hello") + .arg("--world=world") + .assert() + .stdout("[\"Hello\",\"world\"]\n") + .success(); + }); +} + +#[test] +fn current_env_not_overwritten() { + TestEnv::with_default(|e| { + deploy_hello(e); + write_env_file(e, &contract_id()); + + e.new_assert_cmd("contract") + .env("SOROBAN_CONTRACT_ID", "2") + .arg("invoke") + .arg("--") + .arg("hello") + .arg("--world=world") + .assert() + .stderr("error: Contract not found: CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFCT4\n"); + }); +} + +#[test] +fn cli_args_have_priority() { + TestEnv::with_default(|e| { + deploy_hello(e); + write_env_file(e, &contract_id()); + e.new_assert_cmd("contract") + .env("SOROBAN_CONTRACT_ID", "2") + .arg("invoke") + .arg("--id") + .arg(TEST_CONTRACT_ID) + .arg("--") + .arg("hello") + .arg("--world=world") + .assert() + .stdout("[\"Hello\",\"world\"]\n"); + }); +} diff --git a/cmd/crates/soroban-test/tests/it/integration/hello_world.rs b/cmd/crates/soroban-test/tests/it/integration/hello_world.rs new file mode 100644 index 00000000..7714f70d --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/integration/hello_world.rs @@ -0,0 +1,290 @@ +use soroban_cli::commands::{ + contract::{self, fetch}, + keys, +}; +use soroban_test::TestEnv; + +use crate::{integration::util::extend_contract, util::DEFAULT_SEED_PHRASE}; + +use super::util::{ + add_test_seed, deploy_hello, extend, network_passphrase, network_passphrase_arg, rpc_url, + rpc_url_arg, DEFAULT_PUB_KEY, DEFAULT_PUB_KEY_1, DEFAULT_SECRET_KEY, HELLO_WORLD, +}; + +#[tokio::test] +#[ignore] +async fn invoke() { + let sandbox = &TestEnv::default(); + let id = &deploy_hello(sandbox); + extend_contract(sandbox, id, HELLO_WORLD).await; + // Note that all functions tested here have no state + invoke_hello_world(sandbox, id); + invoke_hello_world_with_lib(sandbox, id).await; + invoke_hello_world_with_lib_two(sandbox, id).await; + invoke_auth(sandbox, id); + invoke_auth_with_identity(sandbox, id).await; + invoke_auth_with_different_test_account_fail(sandbox, id).await; + // invoke_auth_with_different_test_account(sandbox, id); + contract_data_read_failure(sandbox, id); + invoke_with_seed(sandbox, id).await; + invoke_with_sk(sandbox, id).await; + // This does add an identity to local config + invoke_with_id(sandbox, id).await; + handles_kebab_case(sandbox, id).await; + fetch(sandbox, id).await; + invoke_prng_u64_in_range_test(sandbox, id).await; +} + +fn invoke_hello_world(sandbox: &TestEnv, id: &str) { + sandbox + .new_assert_cmd("contract") + .arg("invoke") + .arg("--id") + .arg(id) + .arg("--") + .arg("hello") + .arg("--world=world") + .assert() + .stdout("[\"Hello\",\"world\"]\n") + .success(); +} + +async fn invoke_hello_world_with_lib(e: &TestEnv, id: &str) { + let mut cmd = contract::invoke::Cmd { + contract_id: id.to_string(), + slop: vec!["hello".into(), "--world=world".into()], + ..Default::default() + }; + + cmd.config.network.rpc_url = rpc_url(); + cmd.config.network.network_passphrase = 
network_passphrase(); + + let res = e.invoke_cmd(cmd).await.unwrap(); + assert_eq!(res, r#"["Hello","world"]"#); +} + +async fn invoke_hello_world_with_lib_two(e: &TestEnv, id: &str) { + let hello_world = HELLO_WORLD.to_string(); + let mut invoke_args = vec!["--id", id, "--wasm", hello_world.as_str()]; + let args = vec!["--", "hello", "--world=world"]; + let res = + if let (Some(rpc), Some(network_passphrase)) = (rpc_url_arg(), network_passphrase_arg()) { + invoke_args.push(&rpc); + invoke_args.push(&network_passphrase); + e.invoke(&[invoke_args, args].concat()).await.unwrap() + } else { + e.invoke(&[invoke_args, args].concat()).await.unwrap() + }; + assert_eq!(res, r#"["Hello","world"]"#); +} + +fn invoke_auth(sandbox: &TestEnv, id: &str) { + sandbox + .new_assert_cmd("contract") + .arg("invoke") + .arg("--id") + .arg(id) + .arg("--wasm") + .arg(HELLO_WORLD.path()) + .arg("--") + .arg("auth") + .arg(&format!("--addr={DEFAULT_PUB_KEY}")) + .arg("--world=world") + .assert() + .stdout(format!("\"{DEFAULT_PUB_KEY}\"\n")) + .success(); + + // Invoke it again without providing the contract, to exercise the deployment + sandbox + .new_assert_cmd("contract") + .arg("invoke") + .arg("--id") + .arg(id) + .arg("--") + .arg("auth") + .arg(&format!("--addr={DEFAULT_PUB_KEY}")) + .arg("--world=world") + .assert() + .stdout(format!("\"{DEFAULT_PUB_KEY}\"\n")) + .success(); +} + +async fn invoke_auth_with_identity(sandbox: &TestEnv, id: &str) { + sandbox + .cmd::("test -d ") + .run() + .await + .unwrap(); + sandbox + .new_assert_cmd("contract") + .arg("invoke") + .arg("--id") + .arg(id) + .arg("--wasm") + .arg(HELLO_WORLD.path()) + .arg("--") + .arg("auth") + .arg("--addr") + .arg(DEFAULT_PUB_KEY) + .arg("--world=world") + .assert() + .stdout(format!("\"{DEFAULT_PUB_KEY}\"\n")) + .success(); +} + +// fn invoke_auth_with_different_test_account(sandbox: &TestEnv, id: &str) { +// sandbox +// .new_assert_cmd("contract") +// .arg("invoke") +// .arg("--hd-path=1") +// .arg("--id") +// .arg(id) +// .arg("--wasm") +// .arg(HELLO_WORLD.path()) +// .arg("--") +// .arg("auth") +// .arg(&format!("--addr={DEFAULT_PUB_KEY_1}")) +// .arg("--world=world") +// .assert() +// .stdout(format!("\"{DEFAULT_PUB_KEY_1}\"\n")) +// .success(); +// } + +async fn invoke_auth_with_different_test_account_fail(sandbox: &TestEnv, id: &str) { + let res = sandbox + .invoke(&[ + "--hd-path=0", + "--id", + id, + &rpc_url_arg().unwrap_or_default(), + &network_passphrase_arg().unwrap_or_default(), + "--", + "auth", + &format!("--addr={DEFAULT_PUB_KEY_1}"), + "--world=world", + ]) + .await; + let e = res.unwrap_err(); + assert!( + matches!(e, contract::invoke::Error::Rpc(_)), + "Expected rpc error got {e:?}" + ); +} + +fn contract_data_read_failure(sandbox: &TestEnv, id: &str) { + sandbox + .new_assert_cmd("contract") + .arg("read") + .arg("--id") + .arg(id) + .arg("--key=COUNTER") + .arg("--durability=persistent") + .assert() + .failure() + .stderr( + "error: no matching contract data entries were found for the specified contract id\n", + ); +} + +#[tokio::test] +async fn contract_data_read() { + const KEY: &str = "COUNTER"; + let sandbox = &TestEnv::default(); + let id = &deploy_hello(sandbox); + let res = sandbox.invoke(&["--id", id, "--", "inc"]).await.unwrap(); + assert_eq!(res.trim(), "1"); + extend(sandbox, id, Some(KEY)).await; + + sandbox + .new_assert_cmd("contract") + .arg("read") + .arg("--id") + .arg(id) + .arg("--key") + .arg(KEY) + .arg("--durability=persistent") + .assert() + .success() + 
.stdout(predicates::str::starts_with("COUNTER,1")); + + sandbox + .new_assert_cmd("contract") + .arg("invoke") + .arg("--id") + .arg(id) + .arg("--") + .arg("inc") + .assert() + .success(); + + sandbox + .new_assert_cmd("contract") + .arg("read") + .arg("--id") + .arg(id) + .arg("--key") + .arg(KEY) + .arg("--durability=persistent") + .assert() + .success() + .stdout(predicates::str::starts_with("COUNTER,2")); +} + +async fn invoke_with_seed(sandbox: &TestEnv, id: &str) { + invoke_with_source(sandbox, DEFAULT_SEED_PHRASE, id).await; +} + +async fn invoke_with_sk(sandbox: &TestEnv, id: &str) { + invoke_with_source(sandbox, DEFAULT_SECRET_KEY, id).await; +} + +async fn invoke_with_id(sandbox: &TestEnv, id: &str) { + let identity = add_test_seed(sandbox.dir()); + invoke_with_source(sandbox, &identity, id).await; +} + +async fn invoke_with_source(sandbox: &TestEnv, source: &str, id: &str) { + let cmd = sandbox + .invoke(&[ + "--source-account", + source, + "--id", + id, + "--", + "hello", + "--world=world", + ]) + .await + .unwrap(); + assert_eq!(cmd, "[\"Hello\",\"world\"]"); +} + +async fn handles_kebab_case(e: &TestEnv, id: &str) { + assert!(e + .invoke(&["--id", id, "--", "multi-word-cmd", "--contract-owner=world",]) + .await + .is_ok()); +} + +async fn fetch(sandbox: &TestEnv, id: &str) { + let f = sandbox.dir().join("contract.wasm"); + let cmd = sandbox.cmd_arr::(&["--id", id, "--out-file", f.to_str().unwrap()]); + cmd.run().await.unwrap(); + assert!(f.exists()); +} + +async fn invoke_prng_u64_in_range_test(sandbox: &TestEnv, id: &str) { + assert!(sandbox + .invoke(&[ + "--id", + id, + "--wasm", + HELLO_WORLD.path().to_str().unwrap(), + "--", + "prng_u64_in_range", + "--low=0", + "--high=100", + ]) + .await + .is_ok()); +} diff --git a/cmd/crates/soroban-test/tests/it/integration/util.rs b/cmd/crates/soroban-test/tests/it/integration/util.rs new file mode 100644 index 00000000..ea27680b --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/integration/util.rs @@ -0,0 +1,119 @@ +use soroban_cli::commands::contract; +use soroban_test::{TestEnv, Wasm}; +use std::{fmt::Display, path::Path}; + +use crate::util::{add_key, SecretKind}; + +pub const HELLO_WORLD: &Wasm = &Wasm::Custom("test-wasms", "test_hello_world"); +pub const CUSTOM_TYPES: &Wasm = &Wasm::Custom("test-wasms", "test_custom_types"); + +pub fn add_test_seed(dir: &Path) -> String { + let name = "test_seed"; + add_key( + dir, + name, + SecretKind::Seed, + "coral light army gather adapt blossom school alcohol coral light army giggle", + ); + name.to_owned() +} + +pub async fn invoke_with_roundtrip(e: &TestEnv, id: &str, func: &str, data: D) +where + D: Display, +{ + let data = data.to_string(); + println!("{data}"); + let res = e + .invoke(&["--id", id, "--", func, &format!("--{func}"), &data]) + .await + .unwrap(); + assert_eq!(res, data); +} + +pub const DEFAULT_PUB_KEY: &str = "GDIY6AQQ75WMD4W46EYB7O6UYMHOCGQHLAQGQTKHDX4J2DYQCHVCR4W4"; +pub const DEFAULT_SECRET_KEY: &str = "SC36BWNUOCZAO7DMEJNNKFV6BOTPJP7IG5PSHLUOLT6DZFRU3D3XGIXW"; + +pub const DEFAULT_PUB_KEY_1: &str = "GCKZUJVUNEFGD4HLFBUNVYM2QY2P5WQQZMGRA3DDL4HYVT5MW5KG3ODV"; +pub const TEST_SALT: &str = "f55ff16f66f43360266b95db6f8fec01d76031054306ae4a4b380598f6cfd114"; +pub const TEST_CONTRACT_ID: &str = "CBVTIVBYWAO2HNPNGKDCZW4OZYYESTKNGD7IPRTDGQSFJS4QBDQQJX3T"; + +pub fn rpc_url() -> Option { + std::env::var("SOROBAN_RPC_URL").ok() +} + +pub fn rpc_url_arg() -> Option { + rpc_url().map(|url| format!("--rpc-url={url}")) +} + +pub fn network_passphrase() -> Option { + 
std::env::var("SOROBAN_NETWORK_PASSPHRASE").ok() +} + +pub fn network_passphrase_arg() -> Option { + network_passphrase().map(|p| format!("--network-passphrase={p}")) +} + +pub fn deploy_hello(sandbox: &TestEnv) -> String { + deploy_contract(sandbox, HELLO_WORLD) +} + +pub fn deploy_custom(sandbox: &TestEnv) -> String { + deploy_contract(sandbox, CUSTOM_TYPES) +} + +pub fn deploy_contract(sandbox: &TestEnv, wasm: &Wasm) -> String { + let hash = wasm.hash().unwrap(); + sandbox + .new_assert_cmd("contract") + .arg("install") + .arg("--wasm") + .arg(wasm.path()) + .arg("--ignore-checks") + .assert() + .success() + .stdout(format!("{hash}\n")); + + sandbox + .new_assert_cmd("contract") + .arg("deploy") + .arg("--wasm-hash") + .arg(&format!("{hash}")) + .arg("--salt") + .arg(TEST_SALT) + .arg("--ignore-checks") + .assert() + .success() + .stdout(format!("{TEST_CONTRACT_ID}\n")); + TEST_CONTRACT_ID.to_string() +} + +pub async fn extend_contract(sandbox: &TestEnv, id: &str, wasm: &Wasm<'_>) { + extend(sandbox, id, None).await; + let cmd: contract::extend::Cmd = sandbox.cmd_arr(&[ + "--wasm-hash", + wasm.hash().unwrap().to_string().as_str(), + "--durability", + "persistent", + "--ledgers-to-extend", + "100000", + ]); + cmd.run().await.unwrap(); +} + +pub async fn extend(sandbox: &TestEnv, id: &str, value: Option<&str>) { + let mut args = vec![ + "--id", + id, + "--durability", + "persistent", + "--ledgers-to-extend", + "100000", + ]; + if let Some(value) = value { + args.push("--key"); + args.push(value); + } + let cmd: contract::extend::Cmd = sandbox.cmd_arr(&args); + cmd.run().await.unwrap(); +} diff --git a/cmd/crates/soroban-test/tests/it/integration/wrap.rs b/cmd/crates/soroban-test/tests/it/integration/wrap.rs new file mode 100644 index 00000000..a69e70c7 --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/integration/wrap.rs @@ -0,0 +1,97 @@ +use soroban_cli::CommandParser; +use soroban_cli::{ + commands::{contract::deploy::asset, keys}, + utils::contract_id_hash_from_asset, +}; +use soroban_test::TestEnv; + +use super::util::network_passphrase; + +#[tokio::test] +#[ignore] +async fn burn() { + let sandbox = &TestEnv::default(); + let network_passphrase = network_passphrase().unwrap(); + println!("NETWORK_PASSPHRASE: {network_passphrase:?}"); + let address = keys::address::Cmd::parse("test") + .unwrap() + .public_key() + .unwrap(); + let asset = format!("native:{address}"); + wrap_cmd(&asset).run().await.unwrap(); + let asset = soroban_cli::utils::parsing::parse_asset(&asset).unwrap(); + let hash = contract_id_hash_from_asset(&asset, &network_passphrase).unwrap(); + let id = stellar_strkey::Contract(hash.0).to_string(); + assert_eq!( + "CAMTHSPKXZJIRTUXQP5QWJIFH3XIDMKLFAWVQOFOXPTKAW5GKV37ZC4N", + id + ); + assert_eq!( + "true", + sandbox + .invoke(&[ + "--id", + &id, + "--source=test", + "--", + "authorized", + "--id", + &address.to_string() + ]) + .await + .unwrap() + ); + assert_eq!( + "\"9223372036854775807\"", + sandbox + .invoke(&[ + "--id", + &id, + "--source", + "test", + "--", + "balance", + "--id", + &address.to_string() + ]) + .await + .unwrap(), + ); + + println!( + "{}", + sandbox + .invoke(&[ + "--id", + &id, + "--source=test", + "--", + "burn", + "--id", + &address.to_string(), + "--amount=100" + ]) + .await + .unwrap() + ); + + assert_eq!( + "\"9223372036854775707\"", + sandbox + .invoke(&[ + "--id", + &id, + "--source=test", + "--", + "balance", + "--id", + &address.to_string() + ]) + .await + .unwrap(), + ); +} + +fn wrap_cmd(asset: &str) -> asset::Cmd { + 
asset::Cmd::parse_arg_vec(&["--source=test", &format!("--asset={asset}")]).unwrap() +} diff --git a/cmd/crates/soroban-test/tests/it/lab_test_transaction_envelope.txt b/cmd/crates/soroban-test/tests/it/lab_test_transaction_envelope.txt new file mode 100644 index 00000000..6cd59769 --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/lab_test_transaction_envelope.txt @@ -0,0 +1,54 @@ +TransactionEnvelope( + Tx( + TransactionV1Envelope { + tx: Transaction { + source_account: Ed25519( + Uint256(7376fde88e4cd61cc0fb294a1786b3f1d061f5f2f1ca57465faa932211b946d6), + ), + fee: 100, + seq_num: SequenceNumber( + 1, + ), + cond: Time( + TimeBounds { + min_time: TimePoint( + 0, + ), + max_time: TimePoint( + 0, + ), + }, + ), + memo: None, + operations: VecM( + [ + Operation { + source_account: None, + body: CreateAccount( + CreateAccountOp { + destination: AccountId( + PublicKeyTypeEd25519( + Uint256(d18f0210ff6cc1f2dcf1301fbbd4c30ee11a075820684d471df89d0f1011ea28), + ), + ), + starting_balance: 1000000000000, + }, + ), + }, + ], + ), + ext: V0, + }, + signatures: VecM( + [ + DecoratedSignature { + hint: SignatureHint(11b946d6), + signature: Signature( + BytesM(a004a6e9b64c687f3f62b4fde3b1797c35786106e5f97f16dd9afe3ed850df87dd736390501f62726f7e99af4ec358a8fb281cab9f811a43989b8085dd312609), + ), + }, + ], + ), + }, + ), +) diff --git a/cmd/crates/soroban-test/tests/it/main.rs b/cmd/crates/soroban-test/tests/it/main.rs new file mode 100644 index 00000000..a6b18cb2 --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/main.rs @@ -0,0 +1,8 @@ +mod arg_parsing; +mod config; +mod help; +#[cfg(feature = "integration")] +mod integration; +mod plugin; +mod util; +mod version; diff --git a/cmd/crates/soroban-test/tests/it/plugin.rs b/cmd/crates/soroban-test/tests/it/plugin.rs new file mode 100644 index 00000000..7d55d1e5 --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/plugin.rs @@ -0,0 +1,77 @@ +/* +This function calls the soroban executable via cargo and checks that the output +is correct. The PATH environment variable is set to include the target/bin +directory, so that the soroban executable can be found. 
+*/ + +use std::{ffi::OsString, path::PathBuf}; + +#[test] +fn soroban_hello() { + // Add the target/bin directory to the iterator of paths + let paths = get_paths(); + // Call soroban with the PATH variable set to include the target/bin directory + assert_cmd::Command::cargo_bin("soroban") + .unwrap_or_else(|_| assert_cmd::Command::new("soroban")) + .arg("hello") + .env("PATH", &paths) + .assert() + .stdout("Hello, world!\n"); +} + +#[test] +fn list() { + // Call `soroban --list` with the PATH variable set to include the target/bin directory + assert_cmd::Command::cargo_bin("soroban") + .unwrap_or_else(|_| assert_cmd::Command::new("soroban")) + .arg("--list") + .env("PATH", get_paths()) + .assert() + .stdout(predicates::str::contains("hello")); +} + +#[test] +#[cfg(not(unix))] +fn has_no_path() { + // Call soroban with the PATH variable set to include just target/bin directory + assert_cmd::Command::cargo_bin("soroban") + .unwrap_or_else(|_| assert_cmd::Command::new("soroban")) + .arg("hello") + .env("PATH", &target_bin()) + .assert() + .stdout("Hello, world!\n"); +} + +#[test] +fn has_no_path_failure() { + // Call soroban with the PATH variable set to include just target/bin directory + assert_cmd::Command::cargo_bin("soroban") + .unwrap_or_else(|_| assert_cmd::Command::new("soroban")) + .arg("hello") + .assert() + .stderr(predicates::str::contains("error: no such command: `hello`")); +} + +fn target_bin() -> PathBuf { + // Get the current working directory + let current_dir = std::env::current_dir().unwrap(); + + // Create a path to the target/bin directory + current_dir + .join("../../../target/bin") + .canonicalize() + .unwrap() +} + +fn get_paths() -> OsString { + let target_bin_path = target_bin(); + // Get the current PATH environment variable + let path_key = std::env::var_os("PATH"); + if let Some(path_key) = path_key { + // Create an iterator of paths from the PATH environment variable + let current_paths = std::env::split_paths(&path_key); + std::env::join_paths(current_paths.chain(vec![target_bin_path])).unwrap() + } else { + target_bin_path.into() + } +} diff --git a/cmd/crates/soroban-test/tests/it/util.rs b/cmd/crates/soroban-test/tests/it/util.rs new file mode 100644 index 00000000..6d625101 --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/util.rs @@ -0,0 +1,70 @@ +use std::path::Path; + +use soroban_cli::commands::{ + config::{locator::KeyType, secret::Secret}, + contract, +}; +use soroban_test::{TestEnv, Wasm}; + +pub const CUSTOM_TYPES: &Wasm = &Wasm::Custom("test-wasms", "test_custom_types"); + +#[derive(Clone)] +pub enum SecretKind { + Seed, + Key, +} + +#[allow(clippy::needless_pass_by_value)] +pub fn add_key(dir: &Path, name: &str, kind: SecretKind, data: &str) { + let secret = match kind { + SecretKind::Seed => Secret::SeedPhrase { + seed_phrase: data.to_string(), + }, + SecretKind::Key => Secret::SecretKey { + secret_key: data.to_string(), + }, + }; + + KeyType::Identity + .write(name, &secret, &dir.join(".soroban")) + .unwrap(); +} + +pub fn add_test_id(dir: &Path) -> String { + let name = "test_id"; + add_key( + dir, + name, + SecretKind::Key, + "SBGWSG6BTNCKCOB3DIFBGCVMUPQFYPA2G4O34RMTB343OYPXU5DJDVMN", + ); + name.to_owned() +} + +pub const DEFAULT_SEED_PHRASE: &str = + "coral light army gather adapt blossom school alcohol coral light army giggle"; + +#[allow(dead_code)] +pub async fn invoke_custom( + sandbox: &TestEnv, + id: &str, + func: &str, + arg: &str, + wasm: &Path, +) -> Result { + let mut i: contract::invoke::Cmd = sandbox.cmd_arr(&[ + "--id", + 
id, + "--network", + "futurenet", + "--source", + "default", + "--", + func, + arg, + ]); + i.wasm = Some(wasm.to_path_buf()); + i.config.network.network = Some("futurenet".to_owned()); + i.invoke(&soroban_cli::commands::global::Args::default()) + .await +} diff --git a/cmd/crates/soroban-test/tests/it/version.rs b/cmd/crates/soroban-test/tests/it/version.rs new file mode 100644 index 00000000..cb7826fa --- /dev/null +++ b/cmd/crates/soroban-test/tests/it/version.rs @@ -0,0 +1,12 @@ +use soroban_cli::commands::version::long; +use soroban_test::TestEnv; + +#[test] +fn version() { + let sandbox = TestEnv::default(); + sandbox + .new_assert_cmd("version") + .assert() + .success() + .stdout(format!("soroban {}\n", long())); +} diff --git a/cmd/deptool/analyze.go b/cmd/deptool/analyze.go new file mode 100644 index 00000000..ec638905 --- /dev/null +++ b/cmd/deptool/analyze.go @@ -0,0 +1,350 @@ +package main + +import ( + "errors" + "fmt" + "strings" + "time" + + git "github.com/go-git/go-git/v5" + + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/object" + "github.com/go-git/go-git/v5/plumbing/storer" + "github.com/go-git/go-git/v5/storage/memory" +) + +type analyzedProjectDependency struct { + projectDependency + branchName string + fullCommitHash string + latestBranchCommit string + latestBranchCommitTime time.Time + latestBranchVersion string + workspaceVersion bool // is the version is defined per workspace or package ? +} + +type analyzedDependencyFunc func(string, analyzedProjectDependency) + +func analyze(dependencies *projectDependencies, analyzedDependencyFunc analyzedDependencyFunc) map[string]analyzedProjectDependency { + out := make(map[string]analyzedProjectDependency) + +outerDependenciesLoop: + for pkg, depInfo := range dependencies.dependencyNames { + // check if we've already analyzed this project before + // ( since multiple dependencies might refer to the same repo) + for _, prevAnalyzedDep := range out { + if prevAnalyzedDep.githubPath == depInfo.githubPath && + prevAnalyzedDep.githubCommit == depInfo.githubCommit && + prevAnalyzedDep.workspaceVersion { + // yes, we did. 
+ out[pkg] = analyzedProjectDependency{ + projectDependency: *depInfo, + branchName: prevAnalyzedDep.branchName, + fullCommitHash: prevAnalyzedDep.fullCommitHash, + latestBranchCommit: prevAnalyzedDep.latestBranchCommit, + latestBranchCommitTime: prevAnalyzedDep.latestBranchCommitTime, + workspaceVersion: prevAnalyzedDep.workspaceVersion, + latestBranchVersion: prevAnalyzedDep.latestBranchVersion, + } + if analyzedDependencyFunc != nil { + analyzedDependencyFunc(pkg, out[pkg]) + } + continue outerDependenciesLoop + } + } + out[pkg] = analyzedDependency(*depInfo) + + if analyzedDependencyFunc != nil { + analyzedDependencyFunc(pkg, out[pkg]) + } + } + + return out +} + +func analyzedDependency(depInfo projectDependency) analyzedProjectDependency { + path := depInfo.githubPath + if !strings.HasPrefix(path, "https://") { + path = "https://" + path + } + repo, err := git.Clone(memory.NewStorage(), nil, &git.CloneOptions{ + URL: path, + Tags: git.AllTags, + }) + if err != nil { + fmt.Printf("unable to clone repository at %s\n", path) + exitErr() + } + + revCommit, err := lookupShortCommit(repo, depInfo.githubCommit) + if err != nil { + exitErr() + } + + branches, err := getBranches(repo) + if err != nil { + exitErr() + } + + latestCommitRef, err := findBranchFromCommit(repo, branches, revCommit) + if err != nil { + exitErr() + } + if latestCommitRef == nil { + if err != nil { + fmt.Printf("unable to find parent branch for logged commit ?! : %v\n", err) + } else { + fmt.Printf("unable to find parent branch for logged commit %s on %s\n", revCommit.Hash.String(), path) + } + exitErr() + } + parentBranchName := strings.ReplaceAll(latestCommitRef.Name().String(), "refs/heads/", "") + + latestCommit, err := repo.CommitObject(latestCommitRef.Hash()) + if err != nil { + fmt.Printf("unable to get latest commit : %v\n", err) + exitErr() + } + + var updatedVersion string + var workspaceVersion bool + if depInfo.class == depClassCargo { + // for cargo versions, we need to look into the actual repository in order to determine + // the earliest version of the most up-to-date version. + latestCommit, updatedVersion, workspaceVersion, err = findLatestVersion(repo, latestCommitRef, revCommit, depInfo.name) + if err != nil { + exitErr() + } + } + + return analyzedProjectDependency{ + projectDependency: depInfo, + branchName: parentBranchName, + fullCommitHash: revCommit.Hash.String(), + latestBranchCommit: latestCommit.Hash.String(), + latestBranchCommitTime: latestCommit.Committer.When.UTC(), + latestBranchVersion: updatedVersion, + workspaceVersion: workspaceVersion, + } +} + +func findBranchFromCommit(repo *git.Repository, branches map[plumbing.Hash]*plumbing.Reference, revCommit *object.Commit) (branch *plumbing.Reference, err error) { + visited := make(map[plumbing.Hash]bool, 0) + for len(branches) > 0 { + for commit, branch := range branches { + if commit.String() == revCommit.Hash.String() { + // we found the branch. 
+ return branch, nil + } + visited[commit] = true + delete(branches, commit) + + parentCommit, err := repo.CommitObject(commit) + if err != nil { + fmt.Printf("unable to get parent commit : %v\n", err) + return nil, err + } + for _, parent := range parentCommit.ParentHashes { + if !visited[parent] { + branches[parent] = branch + } + } + } + } + return nil, nil +} + +func lookupShortCommit(repo *git.Repository, shortCommit string) (revCommit *object.Commit, err error) { + cIter, err := repo.Log(&git.LogOptions{ + All: true, + }) + if err != nil { + fmt.Printf("unable to get log entries for %s: %v\n", shortCommit, err) + return nil, err + } + + // ... just iterates over the commits, looking for a commit with a specific hash. + lookoutCommit := strings.ToLower(shortCommit) + + err = cIter.ForEach(func(c *object.Commit) error { + revString := strings.ToLower(c.Hash.String()) + if strings.HasPrefix(revString, lookoutCommit) { + // found ! + revCommit = c + return storer.ErrStop + } + return nil + }) + if err != nil && err != storer.ErrStop { + fmt.Printf("unable to iterate on log entries : %v\n", err) + exitErr() + } + if revCommit == nil { + fmt.Printf("the commit object for short commit %s was missing ?!\n", lookoutCommit) + exitErr() + } + cIter.Close() + return revCommit, nil +} + +func getBranches(repo *git.Repository) (branches map[plumbing.Hash]*plumbing.Reference, err error) { + remoteOrigin, err := repo.Remote("origin") + if err != nil { + fmt.Printf("unable to retrieve origin remote : %v\n", err) + return nil, err + } + + remoteRefs, err := remoteOrigin.List(&git.ListOptions{}) + if err != nil { + fmt.Printf("unable to list remote refs : %v\n", err) + return nil, err + } + branchPrefix := "refs/heads/" + branches = make(map[plumbing.Hash]*plumbing.Reference, 0) + for _, remoteRef := range remoteRefs { + refName := remoteRef.Name().String() + if !strings.HasPrefix(refName, branchPrefix) { + continue + } + branches[remoteRef.Hash()] = remoteRef + } + return branches, nil +} + +func findLatestVersion(repo *git.Repository, latestCommitRef *plumbing.Reference, revCommit *object.Commit, pkgName string) (updatedLatestCommit *object.Commit, version string, workspaceVersion bool, err error) { + // create a list of all the commits between the head and the current. + commits := []*object.Commit{} + headCommit, err := repo.CommitObject(latestCommitRef.Hash()) + if err != nil { + return nil, "", false, err + } + for { + commits = append(commits, headCommit) + if headCommit.Hash == revCommit.Hash { + // we're done. + break + } + if parent, err := headCommit.Parent(0); err != nil || parent == nil { + break + } else { + headCommit = parent + } + } + + var versions []string + var workspaceVer []bool + for _, commit := range commits { + version, workspaceVersion, err := findCargoVersionForCommit(pkgName, commit) + if err != nil { + return nil, "", false, err + } + versions = append(versions, version) + workspaceVer = append(workspaceVer, workspaceVersion) + } + for i := 1; i < len(versions); i++ { + if versions[i] != versions[i-1] { + // the version at i-1 is "newer", so we should pick that one. 
+ return commits[i-1], versions[i-1], workspaceVer[i-1], nil + } + } + + return commits[len(commits)-1], versions[len(commits)-1], workspaceVer[len(commits)-1], nil +} + +//lint:ignore funlen gocyclo +func findCargoVersionForCommit(pkgName string, commit *object.Commit) (string, bool, error) { + treeRoot, err := commit.Tree() + if err != nil { + return "", false, err + } + rootCargoFile, err := treeRoot.File("Cargo.toml") + if err != nil { + fmt.Printf("The package %s has unsupported repository structure\n", pkgName) + return "", false, errors.New("unsupported repository structure") + } + internalWorkspacePackage := false + + rootCargoFileLines, err := rootCargoFile.Lines() + if err != nil { + return "", false, err + } + var section string + var curPkgName string + for _, line := range rootCargoFileLines { + if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") { + section = line[1 : len(line)-1] + continue + } + if strings.HasPrefix(line, "members") { + section = "members" + continue + } + switch section { + case "members": + if strings.Contains(line, pkgName) { + // this is a workspace that points to an internal member; + // the member is the package we're after. + internalWorkspacePackage = true + } + case "workspace.package": + lineParts := strings.Split(line, "=") + if len(lineParts) != 2 { + continue + } + if !strings.HasPrefix(lineParts[0], "version") { + continue + } + version := strings.ReplaceAll(strings.TrimSpace(lineParts[1]), "\"", "") + return version, true, nil + case "package": + lineParts := strings.Split(line, "=") + if len(lineParts) != 2 { + continue + } + if strings.HasPrefix(lineParts[0], "name") { + curPkgName = strings.ReplaceAll(strings.TrimSpace(lineParts[1]), "\"", "") + continue + } else if strings.HasPrefix(lineParts[0], "version") && curPkgName == pkgName { + version := strings.ReplaceAll(strings.TrimSpace(lineParts[1]), "\"", "") + return version, false, nil + } + } + } + // fall-back to package specific versioning. 
+ + if internalWorkspacePackage { + pkgCargoFile, err := treeRoot.File(pkgName + "/Cargo.toml") + if err != nil { + return "", false, err + } + pkgCargoFileLines, err := pkgCargoFile.Lines() + if err != nil { + return "", false, err + } + var section string + var curPkgName string + for _, line := range pkgCargoFileLines { + if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") { + section = line[1 : len(line)-1] + continue + } + switch section { + case "package": + lineParts := strings.Split(line, "=") + if len(lineParts) != 2 { + continue + } + if strings.HasPrefix(lineParts[0], "name") { + curPkgName = strings.ReplaceAll(strings.TrimSpace(lineParts[1]), "\"", "") + continue + } else if strings.HasPrefix(lineParts[0], "version") && curPkgName == pkgName { + version := strings.ReplaceAll(strings.TrimSpace(lineParts[1]), "\"", "") + return version, false, nil + } + } + } + } + fmt.Printf("The package %s has unsupported repository structure\n", pkgName) + return "", false, errors.New("unsupported repository structure") +} diff --git a/cmd/deptool/deptool.go b/cmd/deptool/deptool.go new file mode 100644 index 00000000..cc6599f2 --- /dev/null +++ b/cmd/deptool/deptool.go @@ -0,0 +1,85 @@ +package main + +import ( + "fmt" + "os" + + "github.com/spf13/cobra" +) + +var versionCheck bool +var projectDir string +var writeChanges bool +var writeChangesInPlace bool + +var rootCmd = &cobra.Command{ + Use: "deptool", + Short: "Repository dependency tool", + Long: `Repository dependency tool`, + Run: func(cmd *cobra.Command, args []string) { + if versionCheck { + fmt.Println("Build version: 1.0") + return + } + + //If no arguments passed, we should fallback to help + cmd.HelpFunc()(cmd, args) + }, +} + +var scanCmd = &cobra.Command{ + Use: "scan", + Short: "scan project dependencies", + Run: func(cmd *cobra.Command, args []string) { + deps := scanProject(projectDir) + printDependencies(deps) + }, +} + +var analyzeCmd = &cobra.Command{ + Use: "analyze", + Short: "analyze project dependencies", + Run: func(cmd *cobra.Command, args []string) { + deps := scanProject(projectDir) + analyzed := analyze(deps, analyzedDepPrinter) + hasChanges := false + // see if any of the dependencies could be upgraded. + for _, dep := range analyzed { + if dep.latestBranchCommit != dep.fullCommitHash { + // yes, it could be upgraded. 
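+				// Editorial note: once any dependency is behind its branch head, the command
+				// exits with a non-zero status below so CI can flag the available upgrade;
+				// pass -w (or -p) to also write the proposed Cargo.toml / go.mod updates.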
+ hasChanges = true + break + } + } + + if hasChanges { + if writeChanges || writeChangesInPlace { + writeUpdates(projectDir, analyzed, writeChangesInPlace) + } + os.Exit(1) + } + }, +} + +func initCommandHandlers() { + rootCmd.Flags().BoolVarP(&versionCheck, "version", "v", false, "Display and write current build version and exit") + scanCmd.Flags().StringVarP(&projectDir, "directory", "d", ".", "The directory where the project resides") + analyzeCmd.Flags().StringVarP(&projectDir, "directory", "d", ".", "The directory where the project resides") + analyzeCmd.Flags().BoolVarP(&writeChanges, "write", "w", false, "Once analysis is complete, write out the proposed change to Cargo.toml.proposed and go.mod.proposed") + analyzeCmd.Flags().BoolVarP(&writeChangesInPlace, "writeInPlace", "p", false, "Once analysis is complete, write out the changes to the existing Cargo.toml and go.mod") + + rootCmd.AddCommand(scanCmd) + rootCmd.AddCommand(analyzeCmd) +} + +func main() { + initCommandHandlers() + if err := rootCmd.Execute(); err != nil { + fmt.Println(err) + exitErr() + } +} + +func exitErr() { + os.Exit(-1) +} diff --git a/cmd/deptool/printer.go b/cmd/deptool/printer.go new file mode 100644 index 00000000..19c59095 --- /dev/null +++ b/cmd/deptool/printer.go @@ -0,0 +1,81 @@ +package main + +import "fmt" + +const ( + colorReset = "\033[0m" + //colorRed = "\033[31m" + colorGreen = "\033[32m" + colorYellow = "\033[33m" + //colorBlue = "\033[34m" + colorPurple = "\033[35m" + colorCyan = "\033[36m" + colorWhite = "\033[37m" +) + +func printDependencies(dependencies *projectDependencies) { + for _, dep := range dependencies.dependencies { + var version string + if dep.version != "" { + version = fmt.Sprintf(" %s%s", colorGreen, dep.version) + } + fmt.Printf("%s %s %s[%s%s%s@%s%s%s%s]%s\n", + colorGreen, + dep.name, + colorYellow, + colorCyan, + dep.githubPath, + colorWhite, + colorPurple, + dep.githubCommit, + version, + colorYellow, + colorReset) + } +} + +func analyzedDepPrinter(pkg string, dep analyzedProjectDependency) { + var version, latestBranchVersion string + if dep.version != "" { + version = fmt.Sprintf(" %s%s", colorGreen, dep.version) + } + // do we have an upgrade ? 
+ if dep.fullCommitHash == dep.latestBranchCommit { + fmt.Printf("%s %s %s[%s%s%s@%s%s%s%s]%s\n", + colorGreen, + pkg, + colorYellow, + colorCyan, + dep.githubPath, + colorWhite, + colorPurple, + dep.githubCommit, + version, + colorYellow, + colorReset) + return + } + + if dep.latestBranchVersion != "" { + latestBranchVersion = fmt.Sprintf(" %s%s", colorGreen, dep.latestBranchVersion) + } + fmt.Printf("%s %s %s[%s%s%s@%s%s%s%s]%s Upgrade %s[%s%s%s%s]%s\n", + colorGreen, + pkg, + colorYellow, + colorCyan, + dep.githubPath, + colorWhite, + colorPurple, + dep.githubCommit, + version, + colorYellow, + colorReset, + colorYellow, + colorPurple, + dep.latestBranchCommit[:len(dep.githubCommit)], + latestBranchVersion, + colorYellow, + colorReset, + ) +} diff --git a/cmd/deptool/scanner.go b/cmd/deptool/scanner.go new file mode 100644 index 00000000..5045ff60 --- /dev/null +++ b/cmd/deptool/scanner.go @@ -0,0 +1,159 @@ +package main + +import ( + "fmt" + "os" + "path" + "sort" + "strings" + + toml "github.com/pelletier/go-toml" + modfile "golang.org/x/mod/modfile" +) + +const cargoTomlFile = "Cargo.toml" +const goModFile = "go.mod" + +type depClass int + +const ( + depClassCargo depClass = iota + depClassMod +) + +type projectDependencies struct { + dependencies []*projectDependency + dependencyNames map[string]*projectDependency +} + +type projectDependency struct { + class depClass + githubPath string + githubCommit string + direct bool + version string + name string +} + +type cargoDependencyToml struct { + Git string `toml:"git"` + Rev string `toml:"rev"` + Version string `toml:"version"` +} + +type workspaceDepenenciesToml struct { + Dependencies map[string]cargoDependencyToml `toml:"dependencies"` +} + +type patchCratesIOToml struct { + CratesIO map[string]cargoDependencyToml `toml:"crates-io"` +} + +type cargoToml struct { + Workspace workspaceDepenenciesToml // this is the workspace.dependencies entry; the toml decoder breaks it into workspace and dependencies + Patch patchCratesIOToml // this is the patch.crates-io entry +} + +func scanProject(dir string) *projectDependencies { + dependencies := &projectDependencies{ + dependencyNames: make(map[string]*projectDependency), + } + + loadParseCargoToml(dir, dependencies) + loadParseGoMod(dir, dependencies) + + return dependencies +} + +func loadParseCargoToml(dir string, dependencies *projectDependencies) { + cargoFileBytes, err := os.ReadFile(path.Join(dir, cargoTomlFile)) + if err != nil { + fmt.Printf("Unable to read Cargo.toml file : %v\n", err) + exitErr() + } + + var parsedCargo cargoToml + err = toml.Unmarshal(cargoFileBytes, &parsedCargo) + if err != nil { + fmt.Printf("Unable to parse Cargo.toml file : %v\n", err) + exitErr() + } + addTomlDependencies(dependencies, parsedCargo.Patch.CratesIO, false) + addTomlDependencies(dependencies, parsedCargo.Workspace.Dependencies, true) +} + +func addTomlDependencies(dependencies *projectDependencies, tomlDeps map[string]cargoDependencyToml, direct bool) { + names := make([]string, 0, len(tomlDeps)) + for name := range tomlDeps { + names = append(names, name) + } + sort.Strings(names) + for _, pkgName := range names { + crateGit := tomlDeps[pkgName] + if crateGit.Git == "" { + continue + } + + current := &projectDependency{ + class: depClassCargo, + githubPath: crateGit.Git, + githubCommit: crateGit.Rev, + version: crateGit.Version, + direct: direct, + name: pkgName, + } + if existing, has := dependencies.dependencyNames[pkgName]; has && (existing.githubCommit != current.githubCommit || 
existing.githubPath != current.githubPath) { + fmt.Printf("Conflicting entries in Cargo.toml file :\n%v\nvs.\n%v\n", existing, current) + exitErr() + } + if current.githubPath == "" { + continue + } + dependencies.dependencyNames[pkgName] = current + dependencies.dependencies = append(dependencies.dependencies, current) + } +} + +func loadParseGoMod(dir string, dependencies *projectDependencies) { + fileName := path.Join(dir, goModFile) + + cargoFileBytes, err := os.ReadFile(fileName) + if err != nil { + fmt.Printf("Unable to read go.mod file : %v\n", err) + exitErr() + } + + modFile, err := modfile.Parse("", cargoFileBytes, nil) + if err != nil { + fmt.Printf("Unable to read go.mod file : %v\n", err) + exitErr() + } + // scan all the stellar related required modules. + for _, require := range modFile.Require { + if !strings.Contains(require.Mod.Path, "github.com/stellar") || require.Indirect { + continue + } + splittedVersion := strings.Split(require.Mod.Version, "-") + if len(splittedVersion) != 3 { + continue + } + + pathComp := strings.Split(require.Mod.Path, "/") + pkgName := pathComp[len(pathComp)-1] + + current := &projectDependency{ + class: depClassMod, + githubPath: require.Mod.Path, + githubCommit: splittedVersion[2], + direct: true, + name: pkgName, + } + + if existing, has := dependencies.dependencyNames[pkgName]; has && (existing.githubCommit != current.githubCommit || existing.githubPath != current.githubPath) { + fmt.Printf("Conflicting entries in go.mod file :\n%v\nvs.\n%v\n", existing, current) + exitErr() + } + dependencies.dependencyNames[pkgName] = current + dependencies.dependencies = append(dependencies.dependencies, current) + } +} diff --git a/cmd/deptool/writeout.go b/cmd/deptool/writeout.go new file mode 100644 index 00000000..38e2e002 --- /dev/null +++ b/cmd/deptool/writeout.go @@ -0,0 +1,137 @@ +package main + +import ( + "bytes" + "fmt" + "os" + "path" + "strings" + + modfile "golang.org/x/mod/modfile" +) + +func writeUpdates(dir string, deps map[string]analyzedProjectDependency, inplace bool) { + writeUpdatesGoMod(dir, deps, inplace) + writeUpdatesCargoToml(dir, deps, inplace) +} + +func writeUpdatesGoMod(dir string, deps map[string]analyzedProjectDependency, inplace bool) { + fileName := path.Join(dir, goModFile) + + modFileBytes, err := os.ReadFile(fileName) + if err != nil { + fmt.Printf("Unable to read go.mod file : %v\n", err) + exitErr() + } + + modFile, err := modfile.Parse("", modFileBytes, nil) + if err != nil { + fmt.Printf("Unable to read go.mod file : %v\n", err) + exitErr() + } + + changed := false + for _, analyzed := range deps { + if analyzed.class != depClassMod { + continue + } + if analyzed.latestBranchCommit == analyzed.githubCommit { + continue + } + // find if we have entry in the mod file. + for _, req := range modFile.Require { + if req.Mod.Path != analyzed.githubPath { + continue + } + // this entry needs to be updated. 
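+			// Go pseudo-versions have the shape base-timestamp-shorthash, e.g. (illustrative)
+			//   v0.0.0-20240131120000-0123456789ab
+			// so the code below keeps the base, swaps in the newer commit's timestamp and
+			// replaces the short hash with a same-length prefix of the newer commit hash.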
+ splittedVersion := strings.Split(req.Mod.Version, "-") + splittedVersion[2] = analyzed.latestBranchCommit[:len(splittedVersion[2])] + splittedVersion[1] = fmt.Sprintf("%04d%02d%02d%02d%02d%02d", + analyzed.latestBranchCommitTime.Year(), + analyzed.latestBranchCommitTime.Month(), + analyzed.latestBranchCommitTime.Day(), + analyzed.latestBranchCommitTime.Hour(), + analyzed.latestBranchCommitTime.Minute(), + analyzed.latestBranchCommitTime.Second()) + newVer := fmt.Sprintf("%s-%s-%s", splittedVersion[0], splittedVersion[1], splittedVersion[2]) + curPath := req.Mod.Path + err = modFile.DropRequire(req.Mod.Path) + if err != nil { + fmt.Printf("Unable to drop requirement : %v\n", err) + exitErr() + } + err = modFile.AddRequire(curPath, newVer) + if err != nil { + fmt.Printf("Unable to add requirement : %v\n", err) + exitErr() + } + changed = true + } + } + + if !changed { + return + } + + outputBytes, err := modFile.Format() + if err != nil { + fmt.Printf("Unable to format mod file : %v\n", err) + exitErr() + } + if !inplace { + fileName += ".proposed" + } + err = os.WriteFile(fileName, outputBytes, 0200) + if err != nil { + fmt.Printf("Unable to write %s file : %v\n", fileName, err) + exitErr() + } + err = os.Chmod(fileName, 0644) + if err != nil { + fmt.Printf("Unable to chmod %s file : %v\n", fileName, err) + exitErr() + } +} + +func writeUpdatesCargoToml(dir string, deps map[string]analyzedProjectDependency, inplace bool) { + fileName := path.Join(dir, cargoTomlFile) + + modFileBytes, err := os.ReadFile(fileName) + if err != nil { + fmt.Printf("Unable to read go.mod file : %v\n", err) + exitErr() + } + + changed := false + for _, analyzed := range deps { + if analyzed.class != depClassCargo { + continue + } + if analyzed.latestBranchCommit == analyzed.githubCommit { + continue + } + newCommit := analyzed.latestBranchCommit[:len(analyzed.githubCommit)] + // we want to replace every instance of analyzed.githubCommit with newCommit + modFileBytes = bytes.ReplaceAll(modFileBytes, []byte(analyzed.githubCommit), []byte(newCommit)) + + // set the changed flag + changed = true + } + + if !changed { + return + } + if !inplace { + fileName = fileName + ".proposed" + } + err = os.WriteFile(fileName, modFileBytes, 0200) + if err != nil { + fmt.Printf("Unable to write %s file : %v\n", fileName, err) + exitErr() + } + err = os.Chmod(fileName, 0644) + if err != nil { + fmt.Printf("Unable to chmod %s file : %v\n", fileName, err) + exitErr() + } +} diff --git a/cmd/soroban-cli/Cargo.toml b/cmd/soroban-cli/Cargo.toml new file mode 100644 index 00000000..ad60c827 --- /dev/null +++ b/cmd/soroban-cli/Cargo.toml @@ -0,0 +1,106 @@ +[package] +name = "soroban-cli" +description = "Soroban CLI" +homepage = "https://github.com/stellar/soroban-cli" +repository = "https://github.com/stellar/soroban-cli" +authors = ["Stellar Development Foundation "] +license = "Apache-2.0" +readme = "README.md" +version = "20.2.0" +edition = "2021" +rust-version.workspace = true +autobins = false +default-run = "soroban" + +[[bin]] +name = "soroban" +path = "src/bin/main.rs" + +[package.metadata.binstall] +pkg-url = "{ repo }/releases/download/v{ version }/{ name }-{ version }-{ target }{ archive-suffix }" +bin-dir = "{ bin }{ binary-ext }" + +[[bin]] +name = "doc-gen" +path = "src/bin/doc-gen.rs" +required-features = ["clap-markdown"] + +[lib] +name = "soroban_cli" +path = "src/lib.rs" +doctest = false + +[features] +default = [] +opt = ["dep:wasm-opt"] + +[dependencies] +stellar-xdr = { workspace = true, features = ["cli"] } 
+soroban-env-host = { workspace = true } +soroban-spec = { workspace = true } +soroban-spec-json = { workspace = true } +soroban-spec-rust = { workspace = true } +soroban-spec-tools = { workspace = true } +soroban-spec-typescript = { workspace = true } +soroban-ledger-snapshot = { workspace = true } +stellar-strkey = { workspace = true } +soroban-sdk = { workspace = true } +clap = { version = "4.1.8", features = [ + "derive", + "env", + "deprecated", + "string", +] } +base64 = { workspace = true } +thiserror = { workspace = true } +serde = "1.0.82" +serde_derive = "1.0.82" +serde_json = "1.0.82" +serde-aux = "4.1.2" +hex = { workspace = true } +num-bigint = "0.4" +tokio = { version = "1", features = ["full"] } +termcolor = "1.1.3" +termcolor_output = "1.0.1" +clap_complete = "4.1.4" +rand = "0.8.5" +wasmparser = { workspace = true } +sha2 = { workspace = true } +csv = "1.1.6" +ed25519-dalek = "=2.0.0" +jsonrpsee-http-client = "0.20.1" +jsonrpsee-core = "0.20.1" +hyper = "0.14.27" +hyper-tls = "0.5" +http = "0.2.9" +regex = "1.6.0" +wasm-opt = { version = "0.114.0", optional = true } +chrono = "0.4.27" +rpassword = "7.2.0" +dirs = "4.0.0" +toml = "0.5.9" +itertools = "0.10.5" +shlex = "1.1.0" +sep5 = { workspace = true } +ethnum = { workspace = true } +clap-markdown = { version = "0.1.3", optional = true } +which = { workspace = true, features = ["regex"] } +strsim = "0.10.0" +heck = "0.4.1" +tracing = { workspace = true } +tracing-appender = { workspace = true } +tracing-subscriber = { workspace = true, features = ["env-filter"] } +cargo_metadata = "0.15.4" +pathdiff = "0.2.1" +dotenvy = "0.15.7" +# For hyper-tls +[target.'cfg(unix)'.dependencies] +openssl = { version = "0.10.55", features = ["vendored"] } + +[build-dependencies] +crate-git-revision = "0.0.4" + +[dev-dependencies] +assert_cmd = "2.0.4" +assert_fs = "1.0.7" +predicates = "2.1.5" diff --git a/cmd/soroban-cli/README.md b/cmd/soroban-cli/README.md new file mode 100644 index 00000000..d17261b1 --- /dev/null +++ b/cmd/soroban-cli/README.md @@ -0,0 +1,28 @@ +# soroban-cli + +CLI for running Soroban contracts locally in a test VM. Executes WASM files built using the [rs-soroban-sdk](https://github.com/stellar/rs-soroban-sdk). + +Soroban: https://soroban.stellar.org + +## Install + +``` +cargo install --locked soroban-cli +``` + +To install with the `opt` feature, which includes a WASM optimization feature and wasm-opt built in: + +``` +cargo install --locked soroban-cli --features opt +``` + +## Usage + +Can invoke a contract method as a subcommand with different arguments. Anything after the slop (`--`) is passed to the contract's CLI. You can use `--help` to learn about which methods are available and what their arguments are including an example of the type of the input. 
+
+## Example
+
+```
+soroban invoke --id <contract-id> --wasm <contract.wasm> -- --help
+soroban invoke --id <contract-id> --network futurenet -- --help
+```
diff --git a/cmd/soroban-cli/build.rs b/cmd/soroban-cli/build.rs
new file mode 100644
index 00000000..b6e6dd92
--- /dev/null
+++ b/cmd/soroban-cli/build.rs
@@ -0,0 +1,3 @@
+fn main() {
+    crate_git_revision::init();
+}
diff --git a/cmd/soroban-cli/src/bin/doc-gen.rs b/cmd/soroban-cli/src/bin/doc-gen.rs
new file mode 100644
index 00000000..096f9681
--- /dev/null
+++ b/cmd/soroban-cli/src/bin/doc-gen.rs
@@ -0,0 +1,36 @@
+use std::{
+    env, fs,
+    path::{Path, PathBuf},
+};
+
+type DynError = Box<dyn std::error::Error>;
+
+fn main() -> Result<(), DynError> {
+    doc_gen()?;
+    Ok(())
+}
+
+fn doc_gen() -> std::io::Result<()> {
+    let out_dir = docs_dir();
+
+    fs::create_dir_all(out_dir.clone())?;
+
+    std::fs::write(
+        out_dir.join("soroban-cli-full-docs.md"),
+        clap_markdown::help_markdown::<soroban_cli::Root>(),
+    )?;
+
+    Ok(())
+}
+
+fn project_root() -> PathBuf {
+    Path::new(&env!("CARGO_MANIFEST_DIR"))
+        .ancestors()
+        .nth(2)
+        .unwrap()
+        .to_path_buf()
+}
+
+fn docs_dir() -> PathBuf {
+    project_root().join("docs")
+}
diff --git a/cmd/soroban-cli/src/bin/main.rs b/cmd/soroban-cli/src/bin/main.rs
new file mode 100644
index 00000000..7a87099c
--- /dev/null
+++ b/cmd/soroban-cli/src/bin/main.rs
@@ -0,0 +1,51 @@
+use clap::CommandFactory;
+use dotenvy::dotenv;
+use tracing_subscriber::{fmt, EnvFilter};
+
+use soroban_cli::{commands, Root};
+
+#[tokio::main]
+async fn main() {
+    let _ = dotenv().unwrap_or_default();
+    let mut root = Root::new().unwrap_or_else(|e| match e {
+        commands::Error::Clap(e) => {
+            let mut cmd = Root::command();
+            e.format(&mut cmd).exit();
+        }
+        e => {
+            eprintln!("{e}");
+            std::process::exit(1);
+        }
+    });
+    // Now use root to set up the logger
+    if let Some(level) = root.global_args.log_level() {
+        let mut e_filter = EnvFilter::from_default_env()
+            .add_directive("hyper=off".parse().unwrap())
+            .add_directive(format!("soroban_cli={level}").parse().unwrap());
+
+        for filter in &root.global_args.filter_logs {
+            e_filter = e_filter.add_directive(
+                filter
+                    .parse()
+                    .map_err(|e| {
+                        eprintln!("{e}: {filter}");
+                        std::process::exit(1);
+                    })
+                    .unwrap(),
+            );
+        }
+
+        let builder = fmt::Subscriber::builder()
+            .with_env_filter(e_filter)
+            .with_writer(std::io::stderr);
+
+        let subscriber = builder.finish();
+        tracing::subscriber::set_global_default(subscriber)
+            .expect("Failed to set the global tracing subscriber");
+    }
+
+    if let Err(e) = root.run().await {
+        eprintln!("error: {e}");
+        std::process::exit(1);
+    }
+}
diff --git a/cmd/soroban-cli/src/commands/completion.rs b/cmd/soroban-cli/src/commands/completion.rs
new file mode 100644
index 00000000..f64386b4
--- /dev/null
+++ b/cmd/soroban-cli/src/commands/completion.rs
@@ -0,0 +1,32 @@
+use clap::{arg, CommandFactory, Parser};
+use clap_complete::{generate, Shell};
+use std::io;
+
+use crate::commands::Root;
+
+pub const LONG_ABOUT: &str = "\
+Print shell completion code for the specified shell
+
+Ensure the completion package for your shell is installed,
+e.g., bash-completion for bash.
+
+To enable autocomplete in the current bash shell, run:
+    source <(soroban completion --shell bash)
+
+To enable autocomplete permanently, run:
+    echo \"source <(soroban completion --shell bash)\" >> ~/.bashrc";
+
+#[derive(Parser, Debug, Clone)]
+#[group(skip)]
+pub struct Cmd {
+    /// The shell type
+    #[arg(long, value_enum)]
+    shell: Shell,
+}
+
+impl Cmd {
+    pub fn run(&self) {
+        let cmd = &mut Root::command();
+        generate(self.shell, cmd, "soroban", &mut io::stdout());
+    }
+}
diff --git a/cmd/soroban-cli/src/commands/config/locator.rs b/cmd/soroban-cli/src/commands/config/locator.rs
new file mode 100644
index 00000000..2688b043
--- /dev/null
+++ b/cmd/soroban-cli/src/commands/config/locator.rs
@@ -0,0 +1,358 @@
+use clap::arg;
+use serde::de::DeserializeOwned;
+use std::{
+    ffi::OsStr,
+    fmt::Display,
+    fs, io,
+    path::{Path, PathBuf},
+    str::FromStr,
+};
+
+use crate::{utils::find_config_dir, Pwd};
+
+use super::{network::Network, secret::Secret};
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error("Failed to find home directory")]
+    HomeDirNotFound,
+    #[error("Failed to read current directory")]
+    CurrentDirNotFound,
+    #[error("Failed to read current directory and no SOROBAN_CONFIG_HOME is set")]
+    NoConfigEnvVar,
+    #[error("Failed to create directory: {path:?}")]
+    DirCreationFailed { path: PathBuf },
+    #[error(
+        "Failed to read secret's file: {path}.\nProbably need to use `soroban config identity add`"
+    )]
+    SecretFileRead { path: PathBuf },
+    #[error(
+        "Failed to read network file: {path};\nProbably need to use `soroban config network add`"
+    )]
+    NetworkFileRead { path: PathBuf },
+    #[error(transparent)]
+    Toml(#[from] toml::de::Error),
+    #[error("Secret file failed to deserialize")]
+    Deserialization,
+    #[error("Failed to write identity file: {filepath}: {error}")]
+    IdCreationFailed { filepath: PathBuf, error: io::Error },
+    #[error("Network file failed to deserialize")]
+    NetworkDeserialization,
+    #[error("Failed to write network file: {0}")]
+    NetworkCreationFailed(std::io::Error),
+    #[error("Identity directory is invalid: {name}")]
+    IdentityList { name: String },
+    // #[error("Config file failed to deserialize")]
+    // CannotReadConfigFile,
+    #[error("Config file failed to serialize")]
+    ConfigSerialization,
+    // #[error("Config file failed write")]
+    // CannotWriteConfigFile,
+    #[error("XDG_CONFIG_HOME env variable is not a valid path. Got {0}")]
+    XdgConfigHome(String),
+    #[error(transparent)]
+    Io(#[from] std::io::Error),
+    #[error("Failed to remove {0}: {1}")]
+    ConfigRemoval(String, String),
+    #[error("Failed to find config {0} for {1}")]
+    ConfigMissing(String, String),
+    #[error(transparent)]
+    String(#[from] std::string::FromUtf8Error),
+    #[error(transparent)]
+    Secret(#[from] crate::commands::config::secret::Error),
+}
+
+#[derive(Debug, clap::Args, Default, Clone)]
+#[group(skip)]
+pub struct Args {
+    /// Use global config
+    #[arg(long)]
+    pub global: bool,
+
+    /// Location of config directory, default is "."
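+    ///
+    /// (Identities and networks are looked up in the local directory first, e.g.
+    /// `./.soroban/network/futurenet.toml`, and then in the global one, e.g.
+    /// `$XDG_CONFIG_HOME/soroban` or `~/.config/soroban`; paths shown are illustrative.)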
+ #[arg(long, help_heading = "TESTING_OPTIONS")] + pub config_dir: Option, +} + +pub enum Location { + Local(PathBuf), + Global(PathBuf), +} + +impl Display for Location { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{} {:?}", + match self { + Location::Local(_) => "Local", + Location::Global(_) => "Global", + }, + self.as_ref().parent().unwrap().parent().unwrap() + ) + } +} + +impl AsRef for Location { + fn as_ref(&self) -> &Path { + match self { + Location::Local(p) | Location::Global(p) => p.as_path(), + } + } +} + +impl Location { + #[must_use] + pub fn wrap(&self, p: PathBuf) -> Self { + match self { + Location::Local(_) => Location::Local(p), + Location::Global(_) => Location::Global(p), + } + } +} + +impl Args { + pub fn config_dir(&self) -> Result { + if self.global { + global_config_path() + } else { + self.local_config() + } + } + + pub fn local_and_global(&self) -> Result<[Location; 2], Error> { + Ok([ + Location::Local(self.local_config()?), + Location::Global(global_config_path()?), + ]) + } + + pub fn local_config(&self) -> Result { + let pwd = self.current_dir()?; + Ok(find_config_dir(pwd.clone()).unwrap_or_else(|_| pwd.join(".soroban"))) + } + + pub fn current_dir(&self) -> Result { + self.config_dir.as_ref().map_or_else( + || std::env::current_dir().map_err(|_| Error::CurrentDirNotFound), + |pwd| Ok(pwd.clone()), + ) + } + + pub fn write_identity(&self, name: &str, secret: &Secret) -> Result<(), Error> { + KeyType::Identity.write(name, secret, &self.config_dir()?) + } + + pub fn write_network(&self, name: &str, network: &Network) -> Result<(), Error> { + KeyType::Network.write(name, network, &self.config_dir()?) + } + + pub fn list_identities(&self) -> Result, Error> { + Ok(KeyType::Identity + .list_paths(&self.local_and_global()?)? + .into_iter() + .map(|(name, _)| name) + .collect()) + } + + pub fn list_identities_long(&self) -> Result, Error> { + Ok(KeyType::Identity + .list_paths(&self.local_and_global()?) + .into_iter() + .flatten() + .map(|(name, location)| { + let path = match location { + Location::Local(path) | Location::Global(path) => path, + }; + (name, format!("{}", path.display())) + }) + .collect()) + } + + pub fn list_networks(&self) -> Result, Error> { + Ok(KeyType::Network + .list_paths(&self.local_and_global()?) + .into_iter() + .flatten() + .map(|x| x.0) + .collect()) + } + + pub fn list_networks_long(&self) -> Result, Error> { + Ok(KeyType::Network + .list_paths(&self.local_and_global()?) + .into_iter() + .flatten() + .filter_map(|(name, location)| { + Some(( + name, + KeyType::read_from_path::(location.as_ref()).ok()?, + location, + )) + }) + .collect::>()) + } + pub fn read_identity(&self, name: &str) -> Result { + KeyType::Identity.read_with_global(name, &self.local_config()?) + } + + pub fn read_network(&self, name: &str) -> Result { + let res = KeyType::Network.read_with_global(name, &self.local_config()?); + if let Err(Error::ConfigMissing(_, _)) = &res { + if name == "futurenet" { + let network = Network::futurenet(); + self.write_network(name, &network)?; + return Ok(network); + } + } + res + } + + pub fn remove_identity(&self, name: &str) -> Result<(), Error> { + KeyType::Identity.remove(name, &self.config_dir()?) + } + + pub fn remove_network(&self, name: &str) -> Result<(), Error> { + KeyType::Network.remove(name, &self.config_dir()?) 
+ } +} + +fn ensure_directory(dir: PathBuf) -> Result { + let parent = dir.parent().ok_or(Error::HomeDirNotFound)?; + std::fs::create_dir_all(parent).map_err(|_| dir_creation_failed(parent))?; + Ok(dir) +} + +fn dir_creation_failed(p: &Path) -> Error { + Error::DirCreationFailed { + path: p.to_path_buf(), + } +} + +fn read_dir(dir: &Path) -> Result, Error> { + let contents = std::fs::read_dir(dir)?; + let mut res = vec![]; + for entry in contents.filter_map(Result::ok) { + let path = entry.path(); + if let Some("toml") = path.extension().and_then(OsStr::to_str) { + if let Some(os_str) = path.file_stem() { + res.push((os_str.to_string_lossy().trim().to_string(), path)); + } + } + } + res.sort(); + Ok(res) +} + +pub enum KeyType { + Identity, + Network, +} + +impl Display for KeyType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + match self { + KeyType::Identity => "identity", + KeyType::Network => "network", + } + ) + } +} + +impl KeyType { + pub fn read(&self, key: &str, pwd: &Path) -> Result { + let path = self.path(pwd, key); + Self::read_from_path(&path) + } + + pub fn read_from_path(path: &Path) -> Result { + let data = fs::read(path).map_err(|_| Error::NetworkFileRead { + path: path.to_path_buf(), + })?; + let res = toml::from_slice(data.as_slice()); + Ok(res?) + } + + pub fn read_with_global(&self, key: &str, pwd: &Path) -> Result { + for path in [pwd, global_config_path()?.as_path()] { + match self.read(key, path) { + Ok(t) => return Ok(t), + _ => continue, + } + } + Err(Error::ConfigMissing(self.to_string(), key.to_string())) + } + + pub fn write( + &self, + key: &str, + value: &T, + pwd: &Path, + ) -> Result<(), Error> { + let filepath = ensure_directory(self.path(pwd, key))?; + let data = toml::to_string(value).map_err(|_| Error::ConfigSerialization)?; + std::fs::write(&filepath, data).map_err(|error| Error::IdCreationFailed { filepath, error }) + } + + fn root(&self, pwd: &Path) -> PathBuf { + pwd.join(self.to_string()) + } + + fn path(&self, pwd: &Path, key: &str) -> PathBuf { + let mut path = self.root(pwd).join(key); + path.set_extension("toml"); + path + } + + pub fn list_paths(&self, paths: &[Location]) -> Result, Error> { + Ok(paths + .iter() + .flat_map(|p| self.list(p).unwrap_or_default()) + .collect()) + } + + pub fn list(&self, pwd: &Location) -> Result, Error> { + let path = self.root(pwd.as_ref()); + if path.exists() { + let mut files = read_dir(&path)?; + files.sort(); + + Ok(files + .into_iter() + .map(|(name, p)| (name, pwd.wrap(p))) + .collect()) + } else { + Ok(vec![]) + } + } + + pub fn remove(&self, key: &str, pwd: &Path) -> Result<(), Error> { + let path = self.path(pwd, key); + if path.exists() { + std::fs::remove_file(&path) + .map_err(|_| Error::ConfigRemoval(self.to_string(), key.to_string())) + } else { + Ok(()) + } + } +} + +fn global_config_path() -> Result { + Ok(if let Ok(config_home) = std::env::var("XDG_CONFIG_HOME") { + PathBuf::from_str(&config_home).map_err(|_| Error::XdgConfigHome(config_home))? + } else { + dirs::home_dir() + .ok_or(Error::HomeDirNotFound)? 
+ .join(".config") + } + .join("soroban")) +} + +impl Pwd for Args { + fn set_pwd(&mut self, pwd: &Path) { + self.config_dir = Some(pwd.to_path_buf()); + } +} diff --git a/cmd/soroban-cli/src/commands/config/mod.rs b/cmd/soroban-cli/src/commands/config/mod.rs new file mode 100644 index 00000000..be76e77f --- /dev/null +++ b/cmd/soroban-cli/src/commands/config/mod.rs @@ -0,0 +1,95 @@ +use std::path::PathBuf; + +use clap::{arg, command, Parser}; +use serde::{Deserialize, Serialize}; + +use crate::Pwd; + +use self::{network::Network, secret::Secret}; + +use super::{keys, network}; + +pub mod locator; +pub mod secret; + +#[derive(Debug, Parser)] +pub enum Cmd { + /// Configure different networks. Depraecated, use `soroban network` instead. + #[command(subcommand)] + Network(network::Cmd), + /// Identity management. Deprecated, use `soroban keys` instead. + #[command(subcommand)] + Identity(keys::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Identity(#[from] keys::Error), + #[error(transparent)] + Network(#[from] network::Error), + #[error(transparent)] + Secret(#[from] secret::Error), + #[error(transparent)] + Config(#[from] locator::Error), +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + match &self { + Cmd::Identity(identity) => identity.run().await?, + Cmd::Network(network) => network.run()?, + } + Ok(()) + } +} + +#[derive(Debug, clap::Args, Clone, Default)] +#[group(skip)] +pub struct Args { + #[command(flatten)] + pub network: network::Args, + + #[arg(long, visible_alias = "source", env = "SOROBAN_ACCOUNT")] + /// Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` + pub source_account: String, + + #[arg(long)] + /// If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` + pub hd_path: Option, + + #[command(flatten)] + pub locator: locator::Args, +} + +impl Args { + pub fn key_pair(&self) -> Result { + let key = self.account(&self.source_account)?; + Ok(key.key_pair(self.hd_path)?) + } + + pub fn account(&self, account_str: &str) -> Result { + if let Ok(secret) = self.locator.read_identity(account_str) { + Ok(secret) + } else { + Ok(account_str.parse::()?) + } + } + + pub fn get_network(&self) -> Result { + Ok(self.network.get(&self.locator)?) + } + + pub fn config_dir(&self) -> Result { + Ok(self.locator.config_dir()?) 
+ } +} + +impl Pwd for Args { + fn set_pwd(&mut self, pwd: &std::path::Path) { + self.locator.set_pwd(pwd); + } +} + +#[derive(Default, Serialize, Deserialize)] +pub struct Config {} diff --git a/cmd/soroban-cli/src/commands/config/secret.rs b/cmd/soroban-cli/src/commands/config/secret.rs new file mode 100644 index 00000000..4684e2a8 --- /dev/null +++ b/cmd/soroban-cli/src/commands/config/secret.rs @@ -0,0 +1,143 @@ +use clap::arg; +use serde::{Deserialize, Serialize}; +use std::{io::Write, str::FromStr}; +use stellar_strkey::ed25519::{PrivateKey, PublicKey}; + +use crate::utils; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("invalid secret key")] + InvalidSecretKey, + // #[error("seed_phrase must be 12 words long, found {len}")] + // InvalidSeedPhrase { len: usize }, + #[error("seceret input error")] + PasswordRead, + #[error(transparent)] + Secret(#[from] stellar_strkey::DecodeError), + #[error(transparent)] + SeedPhrase(#[from] sep5::error::Error), + #[error(transparent)] + Ed25519(#[from] ed25519_dalek::SignatureError), + #[error("Invalid address {0}")] + InvalidAddress(String), +} + +#[derive(Debug, clap::Args, Clone)] +#[group(skip)] +pub struct Args { + /// Add using secret_key + /// Can provide with SOROBAN_SECRET_KEY + #[arg(long, conflicts_with = "seed_phrase")] + pub secret_key: bool, + /// Add using 12 word seed phrase to generate secret_key + #[arg(long, conflicts_with = "secret_key")] + pub seed_phrase: bool, +} + +impl Args { + pub fn read_secret(&self) -> Result { + if let Ok(secret_key) = std::env::var("SOROBAN_SECRET_KEY") { + Ok(Secret::SecretKey { secret_key }) + } else if self.secret_key { + println!("Type a secret key: "); + let secret_key = read_password()?; + let secret_key = PrivateKey::from_string(&secret_key) + .map_err(|_| Error::InvalidSecretKey)? + .to_string(); + Ok(Secret::SecretKey { secret_key }) + } else if self.seed_phrase { + println!("Type a 12 word seed phrase: "); + let seed_phrase = read_password()?; + let seed_phrase: Vec<&str> = seed_phrase.split_whitespace().collect(); + // if seed_phrase.len() != 12 { + // let len = seed_phrase.len(); + // return Err(Error::InvalidSeedPhrase { len }); + // } + Ok(Secret::SeedPhrase { + seed_phrase: seed_phrase + .into_iter() + .map(ToString::to_string) + .collect::>() + .join(" "), + }) + } else { + Err(Error::PasswordRead {}) + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum Secret { + SecretKey { secret_key: String }, + SeedPhrase { seed_phrase: String }, +} + +impl FromStr for Secret { + type Err = Error; + + fn from_str(s: &str) -> Result { + if PrivateKey::from_string(s).is_ok() { + Ok(Secret::SecretKey { + secret_key: s.to_string(), + }) + } else if sep5::SeedPhrase::from_str(s).is_ok() { + Ok(Secret::SeedPhrase { + seed_phrase: s.to_string(), + }) + } else { + Err(Error::InvalidAddress(s.to_string())) + } + } +} + +impl From for Secret { + fn from(value: PrivateKey) -> Self { + Secret::SecretKey { + secret_key: value.to_string(), + } + } +} + +impl Secret { + pub fn private_key(&self, index: Option) -> Result { + Ok(match self { + Secret::SecretKey { secret_key } => PrivateKey::from_string(secret_key)?, + Secret::SeedPhrase { seed_phrase } => sep5::SeedPhrase::from_str(seed_phrase)? + .from_path_index(index.unwrap_or_default(), None)? + .private(), + }) + } + + pub fn public_key(&self, index: Option) -> Result { + let key = self.key_pair(index)?; + Ok(stellar_strkey::ed25519::PublicKey::from_payload( + key.verifying_key().as_bytes(), + )?) 
+ } + + pub fn key_pair(&self, index: Option) -> Result { + Ok(utils::into_signing_key(&self.private_key(index)?)) + } + + pub fn from_seed(seed: Option<&str>) -> Result { + let seed_phrase = if let Some(seed) = seed.map(str::as_bytes) { + sep5::SeedPhrase::from_entropy(seed) + } else { + sep5::SeedPhrase::random(sep5::MnemonicType::Words12) + }? + .seed_phrase + .into_phrase(); + Ok(Secret::SeedPhrase { seed_phrase }) + } + + pub fn test_seed_phrase() -> Result { + Self::from_seed(Some("0000000000000000")) + } +} + +fn read_password() -> Result { + std::io::stdout().flush().map_err(|_| Error::PasswordRead)?; + rpassword::read_password().map_err(|_| Error::PasswordRead) +} diff --git a/cmd/soroban-cli/src/commands/contract/asset.rs b/cmd/soroban-cli/src/commands/contract/asset.rs new file mode 100644 index 00000000..ad7be020 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/asset.rs @@ -0,0 +1,27 @@ +use super::{deploy, id}; + +#[derive(Debug, clap::Subcommand)] +pub enum Cmd { + /// Get Id of builtin Soroban Asset Contract. Deprecated, use `soroban contract id asset` instead + Id(id::asset::Cmd), + /// Deploy builtin Soroban Asset Contract + Deploy(deploy::asset::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Id(#[from] id::asset::Error), + #[error(transparent)] + Deploy(#[from] deploy::asset::Error), +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + match &self { + Cmd::Id(id) => id.run()?, + Cmd::Deploy(asset) => asset.run().await?, + } + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/bindings.rs b/cmd/soroban-cli/src/commands/contract/bindings.rs new file mode 100644 index 00000000..1da94697 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/bindings.rs @@ -0,0 +1,38 @@ +pub mod json; +pub mod rust; +pub mod typescript; + +#[derive(Debug, clap::Subcommand)] +pub enum Cmd { + /// Generate Json Bindings + Json(json::Cmd), + + /// Generate Rust bindings + Rust(rust::Cmd), + + /// Generate a TypeScript / JavaScript package + Typescript(typescript::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Json(#[from] json::Error), + + #[error(transparent)] + Rust(#[from] rust::Error), + + #[error(transparent)] + Typescript(#[from] typescript::Error), +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + match &self { + Cmd::Json(json) => json.run()?, + Cmd::Rust(rust) => rust.run()?, + Cmd::Typescript(ts) => ts.run().await?, + } + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/bindings/json.rs b/cmd/soroban-cli/src/commands/contract/bindings/json.rs new file mode 100644 index 00000000..060f9064 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/bindings/json.rs @@ -0,0 +1,29 @@ +use std::fmt::Debug; + +use clap::{command, Parser}; +use soroban_spec_json; + +use crate::wasm; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + #[command(flatten)] + wasm: wasm::Args, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("generate json from file: {0}")] + GenerateJsonFromFile(soroban_spec_json::GenerateFromFileError), +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + let wasm_path_str = self.wasm.wasm.to_string_lossy(); + let json = soroban_spec_json::generate_from_file(&wasm_path_str, None) + .map_err(Error::GenerateJsonFromFile)?; + println!("{json}"); + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/bindings/rust.rs 
b/cmd/soroban-cli/src/commands/contract/bindings/rust.rs new file mode 100644 index 00000000..176732ec --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/bindings/rust.rs @@ -0,0 +1,39 @@ +use std::fmt::Debug; + +use clap::{command, Parser}; +use soroban_spec_rust::{self, ToFormattedString}; + +use crate::wasm; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + #[command(flatten)] + wasm: wasm::Args, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("generate rust from file: {0}")] + GenerateRustFromFile(soroban_spec_rust::GenerateFromFileError), + #[error("format rust error: {0}")] + FormatRust(String), +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + let wasm_path_str = self.wasm.wasm.to_string_lossy(); + let code = soroban_spec_rust::generate_from_file(&wasm_path_str, None) + .map_err(Error::GenerateRustFromFile)?; + match code.to_formatted_string() { + Ok(formatted) => { + println!("{formatted}"); + Ok(()) + } + Err(e) => { + println!("{code}"); + Err(Error::FormatRust(e.to_string())) + } + } + } +} diff --git a/cmd/soroban-cli/src/commands/contract/bindings/typescript.rs b/cmd/soroban-cli/src/commands/contract/bindings/typescript.rs new file mode 100644 index 00000000..19c7eecd --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/bindings/typescript.rs @@ -0,0 +1,131 @@ +use std::{ffi::OsString, fmt::Debug, path::PathBuf}; + +use clap::{command, Parser}; +use soroban_spec_typescript::{self as typescript, boilerplate::Project}; + +use crate::wasm; +use crate::{ + commands::{ + config::locator, + contract::{self, fetch}, + network::{self, Network}, + }, + utils::contract_spec::{self, ContractSpec}, +}; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + /// Path to optional wasm binary + #[arg(long)] + pub wasm: Option, + /// Where to place generated project + #[arg(long)] + output_dir: PathBuf, + /// Whether to overwrite output directory if it already exists + #[arg(long)] + overwrite: bool, + /// The contract ID/address on the network + #[arg(long, visible_alias = "id")] + contract_id: String, + #[command(flatten)] + locator: locator::Args, + #[command(flatten)] + network: network::Args, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("failed generate TS from file: {0}")] + GenerateTSFromFile(typescript::GenerateFromFileError), + #[error(transparent)] + Io(#[from] std::io::Error), + + #[error("--output-dir cannot be a file: {0:?}")] + IsFile(PathBuf), + + #[error("--output-dir already exists and you did not specify --overwrite: {0:?}")] + OutputDirExists(PathBuf), + + #[error("--output-dir filepath not representable as utf-8: {0:?}")] + NotUtf8(OsString), + + #[error(transparent)] + Network(#[from] network::Error), + + #[error(transparent)] + Locator(#[from] locator::Error), + #[error(transparent)] + Fetch(#[from] fetch::Error), + #[error(transparent)] + Spec(#[from] contract_spec::Error), + #[error(transparent)] + Wasm(#[from] wasm::Error), + #[error("Failed to get file name from path: {0:?}")] + FailedToGetFileName(PathBuf), +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + let spec = if let Some(wasm) = &self.wasm { + let wasm: wasm::Args = wasm.into(); + wasm.parse()?.spec + } else { + let fetch = contract::fetch::Cmd { + contract_id: self.contract_id.clone(), + out_file: None, + locator: self.locator.clone(), + network: self.network.clone(), + }; + let bytes = fetch.get_bytes().await?; + ContractSpec::new(&bytes)?.spec + }; + if self.output_dir.is_file() { + 
return Err(Error::IsFile(self.output_dir.clone())); + } + if self.output_dir.exists() { + if self.overwrite { + std::fs::remove_dir_all(&self.output_dir)?; + } else { + return Err(Error::OutputDirExists(self.output_dir.clone())); + } + } + std::fs::create_dir_all(&self.output_dir)?; + let p: Project = self.output_dir.clone().try_into()?; + let Network { + rpc_url, + network_passphrase, + .. + } = self + .network + .get(&self.locator) + .ok() + .unwrap_or_else(Network::futurenet); + let absolute_path = self.output_dir.canonicalize()?; + let file_name = absolute_path + .file_name() + .ok_or_else(|| Error::FailedToGetFileName(absolute_path.clone()))?; + let contract_name = &file_name + .to_str() + .ok_or_else(|| Error::NotUtf8(file_name.to_os_string()))?; + p.init( + contract_name, + &self.contract_id, + &rpc_url, + &network_passphrase, + &spec, + )?; + std::process::Command::new("npm") + .arg("install") + .current_dir(&self.output_dir) + .spawn()? + .wait()?; + std::process::Command::new("npm") + .arg("run") + .arg("build") + .current_dir(&self.output_dir) + .spawn()? + .wait()?; + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/build.rs b/cmd/soroban-cli/src/commands/contract/build.rs new file mode 100644 index 00000000..ba17bd1b --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/build.rs @@ -0,0 +1,194 @@ +use clap::Parser; +use itertools::Itertools; +use std::{ + collections::HashSet, + env, + ffi::OsStr, + fmt::Debug, + fs, io, + path::Path, + process::{Command, ExitStatus, Stdio}, +}; + +use cargo_metadata::{Metadata, MetadataCommand, Package}; + +/// Build a contract from source +/// +/// Builds all crates that are referenced by the cargo manifest (Cargo.toml) +/// that have cdylib as their crate-type. Crates are built for the wasm32 +/// target. Unless configured otherwise, crates are built with their default +/// features and with their release profile. +/// +/// To view the commands that will be executed, without executing them, use the +/// --print-commands-only option. +#[derive(Parser, Debug, Clone)] +pub struct Cmd { + /// Path to Cargo.toml + #[arg(long, default_value = "Cargo.toml")] + pub manifest_path: std::path::PathBuf, + /// Package to build + /// + /// If omitted, all packages that build for crate-type cdylib are built. + #[arg(long)] + pub package: Option, + /// Build with the specified profile + #[arg(long, default_value = "release")] + pub profile: String, + /// Build with the list of features activated, space or comma separated + #[arg(long, help_heading = "Features")] + pub features: Option, + /// Build with the all features activated + #[arg( + long, + conflicts_with = "features", + conflicts_with = "no_default_features", + help_heading = "Features" + )] + pub all_features: bool, + /// Build with the default feature not activated + #[arg(long, help_heading = "Features")] + pub no_default_features: bool, + /// Directory to copy wasm files to + /// + /// If provided, wasm files can be found in the cargo target directory, and + /// the specified directory. + /// + /// If ommitted, wasm files are written only to the cargo target directory. 
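+    ///
+    /// Illustrative example (not from the original docs):
+    /// `soroban contract build --out-dir out/` also copies each built `<crate_name>.wasm` into `out/`.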
+ #[arg(long)] + pub out_dir: Option, + /// Print commands to build without executing them + #[arg(long, conflicts_with = "out_dir", help_heading = "Other")] + pub print_commands_only: bool, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Metadata(#[from] cargo_metadata::Error), + #[error(transparent)] + CargoCmd(io::Error), + #[error("exit status {0}")] + Exit(ExitStatus), + #[error("package {package} not found")] + PackageNotFound { package: String }, + #[error("creating out directory: {0}")] + CreatingOutDir(io::Error), + #[error("copying wasm file: {0}")] + CopyingWasmFile(io::Error), + #[error("getting the current directory: {0}")] + GettingCurrentDir(io::Error), +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + let working_dir = env::current_dir().map_err(Error::GettingCurrentDir)?; + + let metadata = self.metadata()?; + let packages = self.packages(&metadata); + let target_dir = &metadata.target_directory; + + if let Some(package) = &self.package { + if packages.is_empty() { + return Err(Error::PackageNotFound { + package: package.clone(), + }); + } + } + + for p in packages { + let mut cmd = Command::new("cargo"); + cmd.stdout(Stdio::piped()); + cmd.arg("rustc"); + let manifest_path = pathdiff::diff_paths(&p.manifest_path, &working_dir) + .unwrap_or(p.manifest_path.clone().into()); + cmd.arg(format!( + "--manifest-path={}", + manifest_path.to_string_lossy() + )); + cmd.arg("--crate-type=cdylib"); + cmd.arg("--target=wasm32-unknown-unknown"); + if self.profile == "release" { + cmd.arg("--release"); + } else { + cmd.arg(format!("--profile={}", self.profile)); + } + if self.all_features { + cmd.arg("--all-features"); + } + if self.no_default_features { + cmd.arg("--no-default-features"); + } + if let Some(features) = self.features() { + let requested: HashSet = features.iter().cloned().collect(); + let available = p.features.iter().map(|f| f.0).cloned().collect(); + let activate = requested.intersection(&available).join(","); + if !activate.is_empty() { + cmd.arg(format!("--features={activate}")); + } + } + let cmd_str = format!( + "cargo {}", + cmd.get_args().map(OsStr::to_string_lossy).join(" ") + ); + + if self.print_commands_only { + println!("{cmd_str}"); + } else { + eprintln!("{cmd_str}"); + let status = cmd.status().map_err(Error::CargoCmd)?; + if !status.success() { + return Err(Error::Exit(status)); + } + + if let Some(out_dir) = &self.out_dir { + fs::create_dir_all(out_dir).map_err(Error::CreatingOutDir)?; + + let file = format!("{}.wasm", p.name.replace('-', "_")); + let target_file_path = Path::new(target_dir) + .join("wasm32-unknown-unknown") + .join(&self.profile) + .join(&file); + let out_file_path = Path::new(out_dir).join(&file); + fs::copy(target_file_path, out_file_path).map_err(Error::CopyingWasmFile)?; + } + } + } + + Ok(()) + } + + fn features(&self) -> Option> { + self.features + .as_ref() + .map(|f| f.split(&[',', ' ']).map(String::from).collect()) + } + + fn packages(&self, metadata: &Metadata) -> Vec { + metadata + .packages + .iter() + .filter(|p| + // Filter by the package name if one is provided. + self.package.is_none() || Some(&p.name) == self.package.as_ref()) + .filter(|p| { + // Filter crates by those that build to cdylib (wasm), unless a + // package is provided. 
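+                // (Soroban contracts build as cdylib so that a .wasm artifact is produced;
+                // plain lib/bin crates in the workspace are skipped here unless named
+                // explicitly via --package.)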
+ self.package.is_some() + || p.targets + .iter() + .any(|t| t.crate_types.iter().any(|c| c == "cdylib")) + }) + .cloned() + .collect() + } + + fn metadata(&self) -> Result { + let mut cmd = MetadataCommand::new(); + cmd.no_deps(); + cmd.manifest_path(&self.manifest_path); + // Do not configure features on the metadata command, because we are + // only collecting non-dependency metadata, features have no impact on + // the output. + cmd.exec() + } +} diff --git a/cmd/soroban-cli/src/commands/contract/deploy.rs b/cmd/soroban-cli/src/commands/contract/deploy.rs new file mode 100644 index 00000000..9baf4459 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/deploy.rs @@ -0,0 +1,28 @@ +pub mod asset; +pub mod wasm; + +#[derive(Debug, clap::Subcommand)] +pub enum Cmd { + /// Deploy builtin Soroban Asset Contract + Asset(asset::Cmd), + /// Deploy normal Wasm Contract + Wasm(wasm::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Asset(#[from] asset::Error), + #[error(transparent)] + Wasm(#[from] wasm::Error), +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + match &self { + Cmd::Asset(asset) => asset.run().await?, + Cmd::Wasm(wasm) => wasm.run().await?, + } + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/deploy/asset.rs b/cmd/soroban-cli/src/commands/contract/deploy/asset.rs new file mode 100644 index 00000000..c10bf816 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/deploy/asset.rs @@ -0,0 +1,155 @@ +use clap::{arg, command, Parser}; +use soroban_env_host::{ + xdr::{ + Asset, ContractDataDurability, ContractExecutable, ContractIdPreimage, CreateContractArgs, + Error as XdrError, Hash, HostFunction, InvokeHostFunctionOp, LedgerKey::ContractData, + LedgerKeyContractData, Memo, MuxedAccount, Operation, OperationBody, Preconditions, + ScAddress, ScVal, SequenceNumber, Transaction, TransactionExt, Uint256, VecM, + }, + HostError, +}; +use std::convert::Infallible; +use std::{array::TryFromSliceError, fmt::Debug, num::ParseIntError}; + +use crate::{ + commands::config, + rpc::{Client, Error as SorobanRpcError}, + utils::{contract_id_hash_from_asset, parsing::parse_asset}, +}; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + // TODO: the Display impl of host errors is pretty user-unfriendly + // (it just calls Debug). I think we can do better than that + Host(#[from] HostError), + #[error("error parsing int: {0}")] + ParseIntError(#[from] ParseIntError), + #[error(transparent)] + Client(#[from] SorobanRpcError), + #[error("internal conversion error: {0}")] + TryFromSliceError(#[from] TryFromSliceError), + #[error("xdr processing error: {0}")] + Xdr(#[from] XdrError), + #[error(transparent)] + Config(#[from] config::Error), + #[error(transparent)] + ParseAssetError(#[from] crate::utils::parsing::Error), +} + +impl From for Error { + fn from(_: Infallible) -> Self { + unreachable!() + } +} + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + /// ID of the Stellar classic asset to wrap, e.g. 
"USDC:G...5" + #[arg(long)] + pub asset: String, + + #[command(flatten)] + pub config: config::Args, + #[command(flatten)] + pub fee: crate::fee::Args, +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + // Parse asset + let asset = parse_asset(&self.asset)?; + + let res_str = self.run_against_rpc_server(asset).await?; + println!("{res_str}"); + Ok(()) + } + + async fn run_against_rpc_server(&self, asset: Asset) -> Result { + let network = self.config.get_network()?; + let client = Client::new(&network.rpc_url)?; + client + .verify_network_passphrase(Some(&network.network_passphrase)) + .await?; + let key = self.config.key_pair()?; + + // Get the account sequence number + let public_strkey = + stellar_strkey::ed25519::PublicKey(key.verifying_key().to_bytes()).to_string(); + // TODO: use symbols for the method names (both here and in serve) + let account_details = client.get_account(&public_strkey).await?; + let sequence: i64 = account_details.seq_num.into(); + let network_passphrase = &network.network_passphrase; + let contract_id = contract_id_hash_from_asset(&asset, network_passphrase)?; + let tx = build_wrap_token_tx( + &asset, + &contract_id, + sequence + 1, + self.fee.fee, + network_passphrase, + &key, + )?; + + client + .prepare_and_send_transaction(&tx, &key, &[], network_passphrase, None, None) + .await?; + + Ok(stellar_strkey::Contract(contract_id.0).to_string()) + } +} + +fn build_wrap_token_tx( + asset: &Asset, + contract_id: &Hash, + sequence: i64, + fee: u32, + _network_passphrase: &str, + key: &ed25519_dalek::SigningKey, +) -> Result { + let contract = ScAddress::Contract(contract_id.clone()); + let mut read_write = vec![ + ContractData(LedgerKeyContractData { + contract: contract.clone(), + key: ScVal::LedgerKeyContractInstance, + durability: ContractDataDurability::Persistent, + }), + ContractData(LedgerKeyContractData { + contract: contract.clone(), + key: ScVal::Vec(Some( + vec![ScVal::Symbol("Metadata".try_into().unwrap())].try_into()?, + )), + durability: ContractDataDurability::Persistent, + }), + ]; + if asset != &Asset::Native { + read_write.push(ContractData(LedgerKeyContractData { + contract, + key: ScVal::Vec(Some( + vec![ScVal::Symbol("Admin".try_into().unwrap())].try_into()?, + )), + durability: ContractDataDurability::Persistent, + })); + } + + let op = Operation { + source_account: None, + body: OperationBody::InvokeHostFunction(InvokeHostFunctionOp { + host_function: HostFunction::CreateContract(CreateContractArgs { + contract_id_preimage: ContractIdPreimage::Asset(asset.clone()), + executable: ContractExecutable::StellarAsset, + }), + auth: VecM::default(), + }), + }; + + Ok(Transaction { + source_account: MuxedAccount::Ed25519(Uint256(key.verifying_key().to_bytes())), + fee, + seq_num: SequenceNumber(sequence), + cond: Preconditions::None, + memo: Memo::None, + operations: vec![op].try_into()?, + ext: TransactionExt::V0, + }) +} diff --git a/cmd/soroban-cli/src/commands/contract/deploy/wasm.rs b/cmd/soroban-cli/src/commands/contract/deploy/wasm.rs new file mode 100644 index 00000000..76c13017 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/deploy/wasm.rs @@ -0,0 +1,228 @@ +use std::array::TryFromSliceError; +use std::fmt::Debug; +use std::num::ParseIntError; + +use clap::{arg, command, Parser}; +use rand::Rng; +use soroban_env_host::{ + xdr::{ + AccountId, ContractExecutable, ContractIdPreimage, ContractIdPreimageFromAddress, + CreateContractArgs, Error as XdrError, Hash, HostFunction, InvokeHostFunctionOp, Memo, + MuxedAccount, 
Operation, OperationBody, Preconditions, PublicKey, ScAddress, + SequenceNumber, Transaction, TransactionExt, Uint256, VecM, + }, + HostError, +}; + +use crate::commands::contract::{self, id::wasm::get_contract_id}; +use crate::{ + commands::{config, contract::install, HEADING_RPC}, + rpc::{self, Client}, + utils, wasm, +}; + +#[derive(Parser, Debug, Clone)] +#[command(group( + clap::ArgGroup::new("wasm_src") + .required(true) + .args(&["wasm", "wasm_hash"]), +))] +#[group(skip)] +pub struct Cmd { + /// WASM file to deploy + #[arg(long, group = "wasm_src")] + wasm: Option, + /// Hash of the already installed/deployed WASM file + #[arg(long = "wasm-hash", conflicts_with = "wasm", group = "wasm_src")] + wasm_hash: Option, + /// Custom salt 32-byte salt for the token id + #[arg( + long, + help_heading = HEADING_RPC, + )] + salt: Option, + #[command(flatten)] + config: config::Args, + #[command(flatten)] + pub fee: crate::fee::Args, + #[arg(long, short = 'i', default_value = "false")] + /// Whether to ignore safety checks when deploying contracts + pub ignore_checks: bool, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Install(#[from] install::Error), + #[error(transparent)] + Host(#[from] HostError), + #[error("error parsing int: {0}")] + ParseIntError(#[from] ParseIntError), + #[error("internal conversion error: {0}")] + TryFromSliceError(#[from] TryFromSliceError), + #[error("xdr processing error: {0}")] + Xdr(#[from] XdrError), + #[error("jsonrpc error: {0}")] + JsonRpc(#[from] jsonrpsee_core::Error), + #[error("cannot parse salt: {salt}")] + CannotParseSalt { salt: String }, + #[error("cannot parse contract ID {contract_id}: {error}")] + CannotParseContractId { + contract_id: String, + error: stellar_strkey::DecodeError, + }, + #[error("cannot parse WASM hash {wasm_hash}: {error}")] + CannotParseWasmHash { + wasm_hash: String, + error: stellar_strkey::DecodeError, + }, + #[error("Must provide either --wasm or --wash-hash")] + WasmNotProvided, + #[error(transparent)] + Rpc(#[from] rpc::Error), + #[error(transparent)] + Config(#[from] config::Error), + #[error(transparent)] + StrKey(#[from] stellar_strkey::DecodeError), + #[error(transparent)] + Infallible(#[from] std::convert::Infallible), + #[error(transparent)] + WasmId(#[from] contract::id::wasm::Error), +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + let res_str = self.run_and_get_contract_id().await?; + println!("{res_str}"); + Ok(()) + } + + pub async fn run_and_get_contract_id(&self) -> Result { + let wasm_hash = if let Some(wasm) = &self.wasm { + let hash = install::Cmd { + wasm: wasm::Args { wasm: wasm.clone() }, + config: self.config.clone(), + fee: self.fee.clone(), + ignore_checks: self.ignore_checks, + } + .run_and_get_hash() + .await?; + hex::encode(hash) + } else { + self.wasm_hash + .as_ref() + .ok_or(Error::WasmNotProvided)? + .to_string() + }; + + let hash = Hash(utils::contract_id_from_str(&wasm_hash).map_err(|e| { + Error::CannotParseWasmHash { + wasm_hash: wasm_hash.clone(), + error: e, + } + })?); + + self.run_against_rpc_server(hash).await + } + + async fn run_against_rpc_server(&self, wasm_hash: Hash) -> Result { + let network = self.config.get_network()?; + let salt: [u8; 32] = match &self.salt { + Some(h) => soroban_spec_tools::utils::padded_hex_from_str(h, 32) + .map_err(|_| Error::CannotParseSalt { salt: h.clone() })? 
+ .try_into() + .map_err(|_| Error::CannotParseSalt { salt: h.clone() })?, + None => rand::thread_rng().gen::<[u8; 32]>(), + }; + + let client = Client::new(&network.rpc_url)?; + client + .verify_network_passphrase(Some(&network.network_passphrase)) + .await?; + let key = self.config.key_pair()?; + + // Get the account sequence number + let public_strkey = + stellar_strkey::ed25519::PublicKey(key.verifying_key().to_bytes()).to_string(); + + let account_details = client.get_account(&public_strkey).await?; + let sequence: i64 = account_details.seq_num.into(); + let (tx, contract_id) = build_create_contract_tx( + wasm_hash, + sequence + 1, + self.fee.fee, + &network.network_passphrase, + salt, + &key, + )?; + client + .prepare_and_send_transaction(&tx, &key, &[], &network.network_passphrase, None, None) + .await?; + Ok(stellar_strkey::Contract(contract_id.0).to_string()) + } +} + +fn build_create_contract_tx( + hash: Hash, + sequence: i64, + fee: u32, + network_passphrase: &str, + salt: [u8; 32], + key: &ed25519_dalek::SigningKey, +) -> Result<(Transaction, Hash), Error> { + let source_account = AccountId(PublicKey::PublicKeyTypeEd25519( + key.verifying_key().to_bytes().into(), + )); + + let contract_id_preimage = ContractIdPreimage::Address(ContractIdPreimageFromAddress { + address: ScAddress::Account(source_account), + salt: Uint256(salt), + }); + let contract_id = get_contract_id(contract_id_preimage.clone(), network_passphrase)?; + + let op = Operation { + source_account: None, + body: OperationBody::InvokeHostFunction(InvokeHostFunctionOp { + host_function: HostFunction::CreateContract(CreateContractArgs { + contract_id_preimage, + executable: ContractExecutable::Wasm(hash), + }), + auth: VecM::default(), + }), + }; + let tx = Transaction { + source_account: MuxedAccount::Ed25519(Uint256(key.verifying_key().to_bytes())), + fee, + seq_num: SequenceNumber(sequence), + cond: Preconditions::None, + memo: Memo::None, + operations: vec![op].try_into()?, + ext: TransactionExt::V0, + }; + + Ok((tx, Hash(contract_id.into()))) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_build_create_contract() { + let hash = hex::decode("0000000000000000000000000000000000000000000000000000000000000000") + .unwrap() + .try_into() + .unwrap(); + let result = build_create_contract_tx( + Hash(hash), + 300, + 1, + "Public Global Stellar Network ; September 2015", + [0u8; 32], + &utils::parse_secret_key("SBFGFF27Y64ZUGFAIG5AMJGQODZZKV2YQKAVUUN4HNE24XZXD2OEUVUP") + .unwrap(), + ); + + assert!(result.is_ok()); + } +} diff --git a/cmd/soroban-cli/src/commands/contract/extend.rs b/cmd/soroban-cli/src/commands/contract/extend.rs new file mode 100644 index 00000000..7e9f1e98 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/extend.rs @@ -0,0 +1,192 @@ +use std::{fmt::Debug, path::Path, str::FromStr}; + +use clap::{command, Parser}; +use soroban_env_host::xdr::{ + Error as XdrError, ExtendFootprintTtlOp, ExtensionPoint, LedgerEntry, LedgerEntryChange, + LedgerEntryData, LedgerFootprint, Memo, MuxedAccount, Operation, OperationBody, Preconditions, + SequenceNumber, SorobanResources, SorobanTransactionData, Transaction, TransactionExt, + TransactionMeta, TransactionMetaV3, TtlEntry, Uint256, +}; + +use crate::{ + commands::config, + key, + rpc::{self, Client}, + wasm, Pwd, +}; + +const MAX_LEDGERS_TO_EXTEND: u32 = 535_679; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + /// Number of ledgers to extend the entries + #[arg(long, required = true)] + pub ledgers_to_extend: 
u32, + /// Only print the new Time To Live ledger + #[arg(long)] + pub ttl_ledger_only: bool, + #[command(flatten)] + pub key: key::Args, + #[command(flatten)] + pub config: config::Args, + #[command(flatten)] + pub fee: crate::fee::Args, +} + +impl FromStr for Cmd { + type Err = clap::error::Error; + + fn from_str(s: &str) -> Result { + use clap::{CommandFactory, FromArgMatches}; + Self::from_arg_matches_mut(&mut Self::command().get_matches_from(s.split_whitespace())) + } +} + +impl Pwd for Cmd { + fn set_pwd(&mut self, pwd: &Path) { + self.config.set_pwd(pwd); + } +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("parsing key {key}: {error}")] + CannotParseKey { + key: String, + error: soroban_spec_tools::Error, + }, + #[error("parsing XDR key {key}: {error}")] + CannotParseXdrKey { key: String, error: XdrError }, + + #[error(transparent)] + Config(#[from] config::Error), + #[error("either `--key` or `--key-xdr` are required")] + KeyIsRequired, + #[error("xdr processing error: {0}")] + Xdr(#[from] XdrError), + #[error("Ledger entry not found")] + LedgerEntryNotFound, + #[error("missing operation result")] + MissingOperationResult, + #[error(transparent)] + Rpc(#[from] rpc::Error), + #[error(transparent)] + Wasm(#[from] wasm::Error), + #[error(transparent)] + Key(#[from] key::Error), +} + +impl Cmd { + #[allow(clippy::too_many_lines)] + pub async fn run(&self) -> Result<(), Error> { + let ttl_ledger = self.run_against_rpc_server().await?; + if self.ttl_ledger_only { + println!("{ttl_ledger}"); + } else { + println!("New ttl ledger: {ttl_ledger}"); + } + + Ok(()) + } + + fn ledgers_to_extend(&self) -> u32 { + let res = u32::min(self.ledgers_to_extend, MAX_LEDGERS_TO_EXTEND); + if res < self.ledgers_to_extend { + tracing::warn!( + "Ledgers to extend is too large, using max value of {MAX_LEDGERS_TO_EXTEND}" + ); + } + res + } + + async fn run_against_rpc_server(&self) -> Result { + let network = self.config.get_network()?; + tracing::trace!(?network); + let keys = self.key.parse_keys()?; + let network = &self.config.get_network()?; + let client = Client::new(&network.rpc_url)?; + let key = self.config.key_pair()?; + let extend_to = self.ledgers_to_extend(); + + // Get the account sequence number + let public_strkey = + stellar_strkey::ed25519::PublicKey(key.verifying_key().to_bytes()).to_string(); + let account_details = client.get_account(&public_strkey).await?; + let sequence: i64 = account_details.seq_num.into(); + + let tx = Transaction { + source_account: MuxedAccount::Ed25519(Uint256(key.verifying_key().to_bytes())), + fee: self.fee.fee, + seq_num: SequenceNumber(sequence + 1), + cond: Preconditions::None, + memo: Memo::None, + operations: vec![Operation { + source_account: None, + body: OperationBody::ExtendFootprintTtl(ExtendFootprintTtlOp { + ext: ExtensionPoint::V0, + extend_to, + }), + }] + .try_into()?, + ext: TransactionExt::V1(SorobanTransactionData { + ext: ExtensionPoint::V0, + resources: SorobanResources { + footprint: LedgerFootprint { + read_only: keys.clone().try_into()?, + read_write: vec![].try_into()?, + }, + instructions: 0, + read_bytes: 0, + write_bytes: 0, + }, + resource_fee: 0, + }), + }; + + let (result, meta, events) = client + .prepare_and_send_transaction(&tx, &key, &[], &network.network_passphrase, None, None) + .await?; + + tracing::trace!(?result); + tracing::trace!(?meta); + if !events.is_empty() { + tracing::info!("Events:\n {events:#?}"); + } + + // The transaction from core will succeed regardless of whether it actually found & 
extended + // the entry, so we have to inspect the result meta to tell if it worked or not. + let TransactionMeta::V3(TransactionMetaV3 { operations, .. }) = meta else { + return Err(Error::LedgerEntryNotFound); + }; + + // Simply check if there is exactly one entry here. We only support extending a single + // entry via this command (which we should fix separately, but). + if operations.len() == 0 { + return Err(Error::LedgerEntryNotFound); + } + + if operations[0].changes.is_empty() { + let entry = client.get_full_ledger_entries(&keys).await?; + let extension = entry.entries[0].live_until_ledger_seq; + if entry.latest_ledger + i64::from(extend_to) < i64::from(extension) { + return Ok(extension); + } + } + + match (&operations[0].changes[0], &operations[0].changes[1]) { + ( + LedgerEntryChange::State(_), + LedgerEntryChange::Updated(LedgerEntry { + data: + LedgerEntryData::Ttl(TtlEntry { + live_until_ledger_seq, + .. + }), + .. + }), + ) => Ok(*live_until_ledger_seq), + _ => Err(Error::LedgerEntryNotFound), + } + } +} diff --git a/cmd/soroban-cli/src/commands/contract/fetch.rs b/cmd/soroban-cli/src/commands/contract/fetch.rs new file mode 100644 index 00000000..61a82fc4 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/fetch.rs @@ -0,0 +1,185 @@ +use std::convert::Infallible; + +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::{fmt::Debug, fs, io}; + +use clap::{arg, command, Parser}; +use soroban_env_host::{ + budget::Budget, + storage::Storage, + xdr::{ + self, ContractCodeEntry, ContractDataDurability, ContractDataEntry, ContractExecutable, + Error as XdrError, LedgerEntryData, LedgerKey, LedgerKeyContractCode, + LedgerKeyContractData, ScAddress, ScContractInstance, ScVal, + }, +}; + +use soroban_spec::read::FromWasmError; +use stellar_strkey::DecodeError; + +use super::super::config::{self, locator}; +use crate::commands::network::{self, Network}; +use crate::{ + rpc::{self, Client}, + utils, Pwd, +}; + +#[derive(Parser, Debug, Default, Clone)] +#[allow(clippy::struct_excessive_bools)] +#[group(skip)] +pub struct Cmd { + /// Contract ID to fetch + #[arg(long = "id", env = "SOROBAN_CONTRACT_ID")] + pub contract_id: String, + /// Where to write output otherwise stdout is used + #[arg(long, short = 'o')] + pub out_file: Option, + #[command(flatten)] + pub locator: locator::Args, + #[command(flatten)] + pub network: network::Args, +} + +impl FromStr for Cmd { + type Err = clap::error::Error; + + fn from_str(s: &str) -> Result { + use clap::{CommandFactory, FromArgMatches}; + Self::from_arg_matches_mut(&mut Self::command().get_matches_from(s.split_whitespace())) + } +} + +impl Pwd for Cmd { + fn set_pwd(&mut self, pwd: &Path) { + self.locator.set_pwd(pwd); + } +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Rpc(#[from] rpc::Error), + #[error(transparent)] + Config(#[from] config::Error), + #[error(transparent)] + Locator(#[from] locator::Error), + #[error(transparent)] + Xdr(#[from] XdrError), + #[error(transparent)] + Spec(#[from] soroban_spec::read::FromWasmError), + #[error(transparent)] + Io(#[from] std::io::Error), + #[error("missing result")] + MissingResult, + #[error("unexpected contract code data type: {0:?}")] + UnexpectedContractCodeDataType(LedgerEntryData), + #[error("reading file {0:?}: {1}")] + CannotWriteContractFile(PathBuf, io::Error), + #[error("cannot parse contract ID {0}: {1}")] + CannotParseContractId(String, DecodeError), + #[error("network details not provided")] + 
NetworkNotProvided, + #[error(transparent)] + Network(#[from] network::Error), + #[error("cannot create contract directory for {0:?}")] + CannotCreateContractDir(PathBuf), +} + +impl From for Error { + fn from(_: Infallible) -> Self { + unreachable!() + } +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + let bytes = self.get_bytes().await?; + if let Some(out_file) = &self.out_file { + if let Some(parent) = out_file.parent() { + if !parent.exists() { + fs::create_dir_all(parent) + .map_err(|_| Error::CannotCreateContractDir(out_file.clone()))?; + } + } + fs::write(out_file, bytes) + .map_err(|io| Error::CannotWriteContractFile(out_file.clone(), io)) + } else { + let stdout = std::io::stdout(); + let mut handle = stdout.lock(); + handle.write_all(&bytes)?; + handle.flush()?; + Ok(()) + } + } + + pub async fn get_bytes(&self) -> Result, Error> { + self.run_against_rpc_server().await + } + + pub fn network(&self) -> Result { + Ok(self.network.get(&self.locator)?) + } + + pub async fn run_against_rpc_server(&self) -> Result, Error> { + let network = self.network()?; + tracing::trace!(?network); + let contract_id = self.contract_id()?; + let client = Client::new(&network.rpc_url)?; + client + .verify_network_passphrase(Some(&network.network_passphrase)) + .await?; + // async closures are not yet stable + Ok(client.get_remote_wasm(&contract_id).await?) + } + + fn contract_id(&self) -> Result<[u8; 32], Error> { + utils::contract_id_from_str(&self.contract_id) + .map_err(|e| Error::CannotParseContractId(self.contract_id.clone(), e)) + } +} + +pub fn get_contract_wasm_from_storage( + storage: &mut Storage, + contract_id: [u8; 32], +) -> Result, FromWasmError> { + let key = LedgerKey::ContractData(LedgerKeyContractData { + contract: ScAddress::Contract(contract_id.into()), + key: ScVal::LedgerKeyContractInstance, + durability: ContractDataDurability::Persistent, + }); + match storage.get(&key.into(), &Budget::default()) { + Ok(rc) => match rc.as_ref() { + xdr::LedgerEntry { + data: + LedgerEntryData::ContractData(ContractDataEntry { + val: ScVal::ContractInstance(ScContractInstance { executable, .. }), + .. + }), + .. + } => match executable { + ContractExecutable::Wasm(hash) => { + if let Ok(rc) = storage.get( + &LedgerKey::ContractCode(LedgerKeyContractCode { hash: hash.clone() }) + .into(), + &Budget::default(), + ) { + match rc.as_ref() { + xdr::LedgerEntry { + data: LedgerEntryData::ContractCode(ContractCodeEntry { code, .. }), + .. 
+ } => Ok(code.to_vec()), + _ => Err(FromWasmError::NotFound), + } + } else { + Err(FromWasmError::NotFound) + } + } + ContractExecutable::StellarAsset => todo!(), + }, + _ => Err(FromWasmError::NotFound), + }, + _ => Err(FromWasmError::NotFound), + } +} diff --git a/cmd/soroban-cli/src/commands/contract/id.rs b/cmd/soroban-cli/src/commands/contract/id.rs new file mode 100644 index 00000000..bb8744d5 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/id.rs @@ -0,0 +1,28 @@ +pub mod asset; +pub mod wasm; + +#[derive(Debug, clap::Subcommand)] +pub enum Cmd { + /// Deploy builtin Soroban Asset Contract + Asset(asset::Cmd), + /// Deploy normal Wasm Contract + Wasm(wasm::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Asset(#[from] asset::Error), + #[error(transparent)] + Wasm(#[from] wasm::Error), +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + match &self { + Cmd::Asset(asset) => asset.run()?, + Cmd::Wasm(wasm) => wasm.run()?, + } + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/id/asset.rs b/cmd/soroban-cli/src/commands/contract/id/asset.rs new file mode 100644 index 00000000..34e5767a --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/id/asset.rs @@ -0,0 +1,36 @@ +use clap::{arg, command, Parser}; + +use crate::commands::config; + +use crate::utils::contract_id_hash_from_asset; +use crate::utils::parsing::parse_asset; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + /// ID of the Stellar classic asset to wrap, e.g. "USDC:G...5" + #[arg(long)] + pub asset: String, + + #[command(flatten)] + pub config: config::Args, +} +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + ParseError(#[from] crate::utils::parsing::Error), + #[error(transparent)] + ConfigError(#[from] crate::commands::config::Error), + #[error(transparent)] + Xdr(#[from] soroban_env_host::xdr::Error), +} +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + let asset = parse_asset(&self.asset)?; + let network = self.config.get_network()?; + let contract_id = contract_id_hash_from_asset(&asset, &network.network_passphrase)?; + let strkey_contract_id = stellar_strkey::Contract(contract_id.0).to_string(); + println!("{strkey_contract_id}"); + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/id/wasm.rs b/cmd/soroban-cli/src/commands/contract/id/wasm.rs new file mode 100644 index 00000000..9c02f07d --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/id/wasm.rs @@ -0,0 +1,68 @@ +use clap::{arg, command, Parser}; +use sha2::{Digest, Sha256}; +use soroban_env_host::xdr::{ + self, AccountId, ContractIdPreimage, ContractIdPreimageFromAddress, Hash, HashIdPreimage, + HashIdPreimageContractId, Limits, PublicKey, ScAddress, Uint256, WriteXdr, +}; + +use crate::commands::config; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + /// ID of the Soroban contract + #[arg(long)] + pub salt: String, + + #[command(flatten)] + pub config: config::Args, +} +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + ParseError(#[from] crate::utils::parsing::Error), + #[error(transparent)] + ConfigError(#[from] crate::commands::config::Error), + #[error(transparent)] + Xdr(#[from] xdr::Error), + #[error("cannot parse salt {0}")] + CannotParseSalt(String), +} +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + let salt: [u8; 32] = soroban_spec_tools::utils::padded_hex_from_str(&self.salt, 32) + .map_err(|_| Error::CannotParseSalt(self.salt.clone()))? 
+ .try_into() + .map_err(|_| Error::CannotParseSalt(self.salt.clone()))?; + let contract_id_preimage = + contract_preimage(&self.config.key_pair()?.verifying_key(), salt); + let contract_id = get_contract_id( + contract_id_preimage.clone(), + &self.config.get_network()?.network_passphrase, + )?; + let strkey_contract_id = stellar_strkey::Contract(contract_id.0).to_string(); + println!("{strkey_contract_id}"); + Ok(()) + } +} + +pub fn contract_preimage(key: &ed25519_dalek::VerifyingKey, salt: [u8; 32]) -> ContractIdPreimage { + let source_account = AccountId(PublicKey::PublicKeyTypeEd25519(key.to_bytes().into())); + ContractIdPreimage::Address(ContractIdPreimageFromAddress { + address: ScAddress::Account(source_account), + salt: Uint256(salt), + }) +} + +pub fn get_contract_id( + contract_id_preimage: ContractIdPreimage, + network_passphrase: &str, +) -> Result { + let network_id = Hash(Sha256::digest(network_passphrase.as_bytes()).into()); + let preimage = HashIdPreimage::ContractId(HashIdPreimageContractId { + network_id, + contract_id_preimage, + }); + let preimage_xdr = preimage.to_xdr(Limits::none())?; + Ok(Hash(Sha256::digest(preimage_xdr).into())) +} diff --git a/cmd/soroban-cli/src/commands/contract/inspect.rs b/cmd/soroban-cli/src/commands/contract/inspect.rs new file mode 100644 index 00000000..355c18ca --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/inspect.rs @@ -0,0 +1,49 @@ +use clap::{command, Parser}; +use soroban_env_host::xdr; +use std::{fmt::Debug, path::PathBuf}; +use tracing::debug; + +use super::SpecOutput; +use crate::{commands::config::locator, wasm}; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + #[command(flatten)] + wasm: wasm::Args, + /// Output just XDR in base64 + #[arg(long, default_value = "docs")] + output: SpecOutput, + + #[clap(flatten)] + locator: locator::Args, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Wasm(#[from] wasm::Error), + #[error("missing spec for {0:?}")] + MissingSpec(PathBuf), + #[error(transparent)] + Xdr(#[from] xdr::Error), + #[error(transparent)] + Spec(#[from] crate::utils::contract_spec::Error), +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + let wasm = self.wasm.parse()?; + debug!("File: {}", self.wasm.wasm.to_string_lossy()); + let output = match self.output { + SpecOutput::XdrBase64 => wasm + .spec_base64 + .clone() + .ok_or_else(|| Error::MissingSpec(self.wasm.wasm.clone()))?, + SpecOutput::XdrBase64Array => wasm.spec_as_json_array()?, + SpecOutput::Docs => wasm.to_string(), + }; + println!("{output}"); + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/install.rs b/cmd/soroban-cli/src/commands/contract/install.rs new file mode 100644 index 00000000..90577529 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/install.rs @@ -0,0 +1,223 @@ +use std::array::TryFromSliceError; +use std::fmt::Debug; +use std::num::ParseIntError; + +use clap::{command, Parser}; +use soroban_env_host::xdr::{ + Error as XdrError, Hash, HostFunction, InvokeHostFunctionOp, Memo, MuxedAccount, Operation, + OperationBody, Preconditions, ScMetaEntry, ScMetaV0, SequenceNumber, Transaction, + TransactionExt, TransactionResult, TransactionResultResult, Uint256, VecM, +}; + +use super::restore; +use crate::key; +use crate::rpc::{self, Client}; +use crate::{commands::config, utils, wasm}; + +const CONTRACT_META_SDK_KEY: &str = "rssdkver"; +const PUBLIC_NETWORK_PASSPHRASE: &str = "Public Global Stellar Network ; September 2015"; + +#[derive(Parser, Debug, 
Clone)] +#[group(skip)] +pub struct Cmd { + #[command(flatten)] + pub config: config::Args, + #[command(flatten)] + pub fee: crate::fee::Args, + #[command(flatten)] + pub wasm: wasm::Args, + #[arg(long, short = 'i', default_value = "false")] + /// Whether to ignore safety checks when deploying contracts + pub ignore_checks: bool, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("error parsing int: {0}")] + ParseIntError(#[from] ParseIntError), + #[error("internal conversion error: {0}")] + TryFromSliceError(#[from] TryFromSliceError), + #[error("xdr processing error: {0}")] + Xdr(#[from] XdrError), + #[error("jsonrpc error: {0}")] + JsonRpc(#[from] jsonrpsee_core::Error), + #[error(transparent)] + Rpc(#[from] rpc::Error), + #[error(transparent)] + Config(#[from] config::Error), + #[error(transparent)] + Wasm(#[from] wasm::Error), + #[error("unexpected ({length}) simulate transaction result length")] + UnexpectedSimulateTransactionResultSize { length: usize }, + #[error(transparent)] + Restore(#[from] restore::Error), + #[error("cannot parse WASM file {wasm}: {error}")] + CannotParseWasm { + wasm: std::path::PathBuf, + error: wasm::Error, + }, + #[error("the deployed smart contract {wasm} was built with Soroban Rust SDK v{version}, a release candidate version not intended for use with the Stellar Public Network. To deploy anyway, use --ignore-checks")] + ContractCompiledWithReleaseCandidateSdk { + wasm: std::path::PathBuf, + version: String, + }, +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + let res_str = hex::encode(self.run_and_get_hash().await?); + println!("{res_str}"); + Ok(()) + } + + pub async fn run_and_get_hash(&self) -> Result { + self.run_against_rpc_server(&self.wasm.read()?).await + } + + async fn run_against_rpc_server(&self, contract: &[u8]) -> Result { + let network = self.config.get_network()?; + let client = Client::new(&network.rpc_url)?; + client + .verify_network_passphrase(Some(&network.network_passphrase)) + .await?; + let wasm_spec = &self.wasm.parse().map_err(|e| Error::CannotParseWasm { + wasm: self.wasm.wasm.clone(), + error: e, + })?; + // Check Rust SDK version if using the public network. + if let Some(rs_sdk_ver) = get_contract_meta_sdk_version(wasm_spec) { + if rs_sdk_ver.contains("rc") + && !self.ignore_checks + && network.network_passphrase == PUBLIC_NETWORK_PASSPHRASE + { + return Err(Error::ContractCompiledWithReleaseCandidateSdk { + wasm: self.wasm.wasm.clone(), + version: rs_sdk_ver, + }); + } else if rs_sdk_ver.contains("rc") + && network.network_passphrase == PUBLIC_NETWORK_PASSPHRASE + { + tracing::warn!("the deployed smart contract {path} was built with Soroban Rust SDK v{rs_sdk_ver}, a release candidate version not intended for use with the Stellar Public Network", path = self.wasm.wasm.display()); + } + } + let key = self.config.key_pair()?; + + // Get the account sequence number + let public_strkey = + stellar_strkey::ed25519::PublicKey(key.verifying_key().to_bytes()).to_string(); + let account_details = client.get_account(&public_strkey).await?; + let sequence: i64 = account_details.seq_num.into(); + + let (tx_without_preflight, hash) = + build_install_contract_code_tx(contract, sequence + 1, self.fee.fee, &key)?; + + // Currently internal errors are not returned if the contract code is expired + if let ( + TransactionResult { + result: TransactionResultResult::TxInternalError, + .. 
+ }, + _, + _, + ) = client + .prepare_and_send_transaction( + &tx_without_preflight, + &key, + &[], + &network.network_passphrase, + None, + None, + ) + .await? + { + // Now just need to restore it and don't have to install again + restore::Cmd { + key: key::Args { + contract_id: None, + key: None, + key_xdr: None, + wasm: Some(self.wasm.wasm.clone()), + wasm_hash: None, + durability: super::Durability::Persistent, + }, + config: self.config.clone(), + fee: self.fee.clone(), + ledgers_to_extend: None, + ttl_ledger_only: true, + } + .run_against_rpc_server() + .await?; + } + + Ok(hash) + } +} + +fn get_contract_meta_sdk_version(wasm_spec: &utils::contract_spec::ContractSpec) -> Option { + let rs_sdk_version_option = if let Some(_meta) = &wasm_spec.meta_base64 { + wasm_spec.meta.iter().find(|entry| match entry { + ScMetaEntry::ScMetaV0(ScMetaV0 { key, .. }) => { + key.to_utf8_string_lossy().contains(CONTRACT_META_SDK_KEY) + } + }) + } else { + None + }; + if let Some(rs_sdk_version_entry) = &rs_sdk_version_option { + match rs_sdk_version_entry { + ScMetaEntry::ScMetaV0(ScMetaV0 { val, .. }) => { + return Some(val.to_utf8_string_lossy()); + } + } + } + None +} + +pub(crate) fn build_install_contract_code_tx( + source_code: &[u8], + sequence: i64, + fee: u32, + key: &ed25519_dalek::SigningKey, +) -> Result<(Transaction, Hash), XdrError> { + let hash = utils::contract_hash(source_code)?; + + let op = Operation { + source_account: Some(MuxedAccount::Ed25519(Uint256( + key.verifying_key().to_bytes(), + ))), + body: OperationBody::InvokeHostFunction(InvokeHostFunctionOp { + host_function: HostFunction::UploadContractWasm(source_code.try_into()?), + auth: VecM::default(), + }), + }; + + let tx = Transaction { + source_account: MuxedAccount::Ed25519(Uint256(key.verifying_key().to_bytes())), + fee, + seq_num: SequenceNumber(sequence), + cond: Preconditions::None, + memo: Memo::None, + operations: vec![op].try_into()?, + ext: TransactionExt::V0, + }; + + Ok((tx, hash)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_build_install_contract_code() { + let result = build_install_contract_code_tx( + b"foo", + 300, + 1, + &utils::parse_secret_key("SBFGFF27Y64ZUGFAIG5AMJGQODZZKV2YQKAVUUN4HNE24XZXD2OEUVUP") + .unwrap(), + ); + + assert!(result.is_ok()); + } +} diff --git a/cmd/soroban-cli/src/commands/contract/invoke.rs b/cmd/soroban-cli/src/commands/contract/invoke.rs new file mode 100644 index 00000000..669342b0 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/invoke.rs @@ -0,0 +1,484 @@ +use std::collections::HashMap; +use std::convert::{Infallible, TryInto}; +use std::ffi::OsString; +use std::num::ParseIntError; +use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::{fmt::Debug, fs, io}; + +use clap::{arg, command, value_parser, Parser}; +use ed25519_dalek::SigningKey; +use heck::ToKebabCase; + +use soroban_env_host::{ + xdr::{ + self, Error as XdrError, Hash, HostFunction, InvokeContractArgs, InvokeHostFunctionOp, + LedgerEntryData, LedgerFootprint, Memo, MuxedAccount, Operation, OperationBody, + Preconditions, ScAddress, ScSpecEntry, ScSpecFunctionV0, ScSpecTypeDef, ScVal, ScVec, + SequenceNumber, SorobanAuthorizationEntry, SorobanResources, Transaction, TransactionExt, + Uint256, VecM, + }, + HostError, +}; + +use soroban_spec::read::FromWasmError; +use stellar_strkey::DecodeError; + +use super::super::{ + config::{self, locator}, + events, +}; +use crate::{ + commands::global, + rpc::{self, Client}, + utils::{self, contract_spec}, + Pwd, +}; +use 
soroban_spec_tools::Spec; + +#[derive(Parser, Debug, Default, Clone)] +#[allow(clippy::struct_excessive_bools)] +#[group(skip)] +pub struct Cmd { + /// Contract ID to invoke + #[arg(long = "id", env = "SOROBAN_CONTRACT_ID")] + pub contract_id: String, + // For testing only + #[arg(skip)] + pub wasm: Option, + /// Output the cost execution to stderr + #[arg(long = "cost")] + pub cost: bool, + /// Function name as subcommand, then arguments for that function as `--arg-name value` + #[arg(last = true, id = "CONTRACT_FN_AND_ARGS")] + pub slop: Vec, + #[command(flatten)] + pub config: config::Args, + #[command(flatten)] + pub fee: crate::fee::Args, +} + +impl FromStr for Cmd { + type Err = clap::error::Error; + + fn from_str(s: &str) -> Result { + use clap::{CommandFactory, FromArgMatches}; + Self::from_arg_matches_mut(&mut Self::command().get_matches_from(s.split_whitespace())) + } +} + +impl Pwd for Cmd { + fn set_pwd(&mut self, pwd: &Path) { + self.config.set_pwd(pwd); + } +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("parsing argument {arg}: {error}")] + CannotParseArg { + arg: String, + error: soroban_spec_tools::Error, + }, + #[error("cannot add contract to ledger entries: {0}")] + CannotAddContractToLedgerEntries(XdrError), + #[error(transparent)] + // TODO: the Display impl of host errors is pretty user-unfriendly + // (it just calls Debug). I think we can do better than that + Host(#[from] HostError), + #[error("reading file {0:?}: {1}")] + CannotReadContractFile(PathBuf, io::Error), + #[error("committing file {filepath}: {error}")] + CannotCommitEventsFile { + filepath: std::path::PathBuf, + error: events::Error, + }, + #[error("cannot parse contract ID {0}: {1}")] + CannotParseContractId(String, DecodeError), + #[error("function {0} was not found in the contract")] + FunctionNotFoundInContractSpec(String), + #[error("parsing contract spec: {0}")] + CannotParseContractSpec(FromWasmError), + // }, + #[error("function name {0} is too long")] + FunctionNameTooLong(String), + #[error("argument count ({current}) surpasses maximum allowed count ({maximum})")] + MaxNumberOfArgumentsReached { current: usize, maximum: usize }, + #[error("cannot print result {result:?}: {error}")] + CannotPrintResult { + result: ScVal, + error: soroban_spec_tools::Error, + }, + #[error(transparent)] + Xdr(#[from] XdrError), + #[error("error parsing int: {0}")] + ParseIntError(#[from] ParseIntError), + #[error(transparent)] + Rpc(#[from] rpc::Error), + #[error("unexpected contract code data type: {0:?}")] + UnexpectedContractCodeDataType(LedgerEntryData), + #[error("missing operation result")] + MissingOperationResult, + #[error("missing result")] + MissingResult, + #[error(transparent)] + StrVal(#[from] soroban_spec_tools::Error), + #[error("error loading signing key: {0}")] + SignatureError(#[from] ed25519_dalek::SignatureError), + #[error(transparent)] + Config(#[from] config::Error), + #[error("unexpected ({length}) simulate transaction result length")] + UnexpectedSimulateTransactionResultSize { length: usize }, + #[error("Missing argument {0}")] + MissingArgument(String), + #[error(transparent)] + Clap(#[from] clap::Error), + #[error(transparent)] + Locator(#[from] locator::Error), + #[error("Contract Error\n{0}: {1}")] + ContractInvoke(String, String), + #[error(transparent)] + StrKey(#[from] stellar_strkey::DecodeError), + #[error(transparent)] + ContractSpec(#[from] contract_spec::Error), + #[error("")] + MissingFileArg(PathBuf), +} + +impl From for Error { + fn from(_: 
Infallible) -> Self { + unreachable!() + } +} + +impl Cmd { + fn build_host_function_parameters( + &self, + contract_id: [u8; 32], + spec_entries: &[ScSpecEntry], + ) -> Result<(String, Spec, InvokeContractArgs, Vec), Error> { + let spec = Spec(Some(spec_entries.to_vec())); + let mut cmd = clap::Command::new(self.contract_id.clone()) + .no_binary_name(true) + .term_width(300) + .max_term_width(300); + + for ScSpecFunctionV0 { name, .. } in spec.find_functions()? { + cmd = cmd.subcommand(build_custom_cmd(&name.to_utf8_string_lossy(), &spec)?); + } + cmd.build(); + let long_help = cmd.render_long_help(); + let mut matches_ = cmd.get_matches_from(&self.slop); + let Some((function, matches_)) = &matches_.remove_subcommand() else { + println!("{long_help}"); + std::process::exit(1); + }; + + let func = spec.find_function(function)?; + // create parsed_args in same order as the inputs to func + let mut signers: Vec = vec![]; + let parsed_args = func + .inputs + .iter() + .map(|i| { + let name = i.name.to_utf8_string()?; + if let Some(mut val) = matches_.get_raw(&name) { + let mut s = val.next().unwrap().to_string_lossy().to_string(); + if matches!(i.type_, ScSpecTypeDef::Address) { + let cmd = crate::commands::keys::address::Cmd { + name: s.clone(), + hd_path: Some(0), + locator: self.config.locator.clone(), + }; + if let Ok(address) = cmd.public_key() { + s = address.to_string(); + } + if let Ok(key) = cmd.private_key() { + signers.push(key); + } + } + spec.from_string(&s, &i.type_) + .map_err(|error| Error::CannotParseArg { arg: name, error }) + } else if matches!(i.type_, ScSpecTypeDef::Option(_)) { + Ok(ScVal::Void) + } else if let Some(arg_path) = + matches_.get_one::(&fmt_arg_file_name(&name)) + { + if matches!(i.type_, ScSpecTypeDef::Bytes | ScSpecTypeDef::BytesN(_)) { + Ok(ScVal::try_from( + &std::fs::read(arg_path) + .map_err(|_| Error::MissingFileArg(arg_path.clone()))?, + ) + .map_err(|()| Error::CannotParseArg { + arg: name.clone(), + error: soroban_spec_tools::Error::Unknown, + })?) 
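+ // Bytes and BytesN arguments read the file as raw bytes; every other type
+ // falls through to the branch below, which reads the file as a JSON string
+ // and parses it against the contract spec.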
+ } else { + let file_contents = std::fs::read_to_string(arg_path) + .map_err(|_| Error::MissingFileArg(arg_path.clone()))?; + tracing::debug!( + "file {arg_path:?}, has contents:\n{file_contents}\nAnd type {:#?}\n{}", + i.type_, + file_contents.len() + ); + spec.from_string(&file_contents, &i.type_) + .map_err(|error| Error::CannotParseArg { arg: name, error }) + } + } else { + Err(Error::MissingArgument(name)) + } + }) + .collect::, Error>>()?; + + let contract_address_arg = ScAddress::Contract(Hash(contract_id)); + let function_symbol_arg = function + .try_into() + .map_err(|()| Error::FunctionNameTooLong(function.clone()))?; + + let final_args = + parsed_args + .clone() + .try_into() + .map_err(|_| Error::MaxNumberOfArgumentsReached { + current: parsed_args.len(), + maximum: ScVec::default().max_len(), + })?; + + let invoke_args = InvokeContractArgs { + contract_address: contract_address_arg, + function_name: function_symbol_arg, + args: final_args, + }; + + Ok((function.clone(), spec, invoke_args, signers)) + } + + pub async fn run(&self, global_args: &global::Args) -> Result<(), Error> { + let res = self.invoke(global_args).await?; + println!("{res}"); + Ok(()) + } + + pub async fn invoke(&self, global_args: &global::Args) -> Result { + self.run_against_rpc_server(global_args).await + } + + pub async fn run_against_rpc_server( + &self, + global_args: &global::Args, + ) -> Result { + let network = self.config.get_network()?; + tracing::trace!(?network); + let contract_id = self.contract_id()?; + let spec_entries = self.spec_entries()?; + if let Some(spec_entries) = &spec_entries { + // For testing wasm arg parsing + let _ = self.build_host_function_parameters(contract_id, spec_entries)?; + } + let client = Client::new(&network.rpc_url)?; + client + .verify_network_passphrase(Some(&network.network_passphrase)) + .await?; + let key = self.config.key_pair()?; + + // Get the account sequence number + let public_strkey = + stellar_strkey::ed25519::PublicKey(key.verifying_key().to_bytes()).to_string(); + let account_details = client.get_account(&public_strkey).await?; + let sequence: i64 = account_details.seq_num.into(); + + // Get the contract + let spec_entries = client.get_remote_contract_spec(&contract_id).await?; + + // Get the ledger footprint + let (function, spec, host_function_params, signers) = + self.build_host_function_parameters(contract_id, &spec_entries)?; + let tx = build_invoke_contract_tx( + host_function_params.clone(), + sequence + 1, + self.fee.fee, + &key, + )?; + + let (result, meta, events) = client + .prepare_and_send_transaction( + &tx, + &key, + &signers, + &network.network_passphrase, + Some(log_events), + (global_args.verbose || global_args.very_verbose || self.cost) + .then_some(log_resources), + ) + .await?; + + tracing::debug!(?result); + crate::log::diagnostic_events(&events, tracing::Level::INFO); + let xdr::TransactionMeta::V3(xdr::TransactionMetaV3 { + soroban_meta: Some(xdr::SorobanTransactionMeta { return_value, .. }), + .. + }) = meta + else { + return Err(Error::MissingOperationResult); + }; + + output_to_string(&spec, &return_value, &function) + } + + pub fn read_wasm(&self) -> Result>, Error> { + Ok(if let Some(wasm) = self.wasm.as_ref() { + Some(fs::read(wasm).map_err(|e| Error::CannotReadContractFile(wasm.clone(), e))?) + } else { + None + }) + } + + pub fn spec_entries(&self) -> Result>, Error> { + self.read_wasm()? 
+ .map(|wasm| { + soroban_spec::read::from_wasm(&wasm).map_err(Error::CannotParseContractSpec) + }) + .transpose() + } +} + +impl Cmd { + fn contract_id(&self) -> Result<[u8; 32], Error> { + utils::contract_id_from_str(&self.contract_id) + .map_err(|e| Error::CannotParseContractId(self.contract_id.clone(), e)) + } +} + +fn log_events( + footprint: &LedgerFootprint, + auth: &[VecM], + events: &[xdr::DiagnosticEvent], +) { + crate::log::auth(auth); + crate::log::diagnostic_events(events, tracing::Level::TRACE); + crate::log::footprint(footprint); +} + +fn log_resources(resources: &SorobanResources) { + crate::log::cost(resources); +} + +pub fn output_to_string(spec: &Spec, res: &ScVal, function: &str) -> Result { + let mut res_str = String::new(); + if let Some(output) = spec.find_function(function)?.outputs.first() { + res_str = spec + .xdr_to_json(res, output) + .map_err(|e| Error::CannotPrintResult { + result: res.clone(), + error: e, + })? + .to_string(); + } + Ok(res_str) +} + +fn build_invoke_contract_tx( + parameters: InvokeContractArgs, + sequence: i64, + fee: u32, + key: &SigningKey, +) -> Result { + let op = Operation { + source_account: None, + body: OperationBody::InvokeHostFunction(InvokeHostFunctionOp { + host_function: HostFunction::InvokeContract(parameters), + auth: VecM::default(), + }), + }; + Ok(Transaction { + source_account: MuxedAccount::Ed25519(Uint256(key.verifying_key().to_bytes())), + fee, + seq_num: SequenceNumber(sequence), + cond: Preconditions::None, + memo: Memo::None, + operations: vec![op].try_into()?, + ext: TransactionExt::V0, + }) +} + +fn build_custom_cmd(name: &str, spec: &Spec) -> Result { + let func = spec + .find_function(name) + .map_err(|_| Error::FunctionNotFoundInContractSpec(name.to_string()))?; + + // Parse the function arguments + let inputs_map = &func + .inputs + .iter() + .map(|i| (i.name.to_utf8_string().unwrap(), i.type_.clone())) + .collect::>(); + let name: &'static str = Box::leak(name.to_string().into_boxed_str()); + let mut cmd = clap::Command::new(name) + .no_binary_name(true) + .term_width(300) + .max_term_width(300); + let kebab_name = name.to_kebab_case(); + if kebab_name != name { + cmd = cmd.alias(kebab_name); + } + let doc: &'static str = Box::leak(func.doc.to_utf8_string_lossy().into_boxed_str()); + let long_doc: &'static str = Box::leak(arg_file_help(doc).into_boxed_str()); + + cmd = cmd.about(Some(doc)).long_about(long_doc); + for (name, type_) in inputs_map { + let mut arg = clap::Arg::new(name); + let file_arg_name = fmt_arg_file_name(name); + let mut file_arg = clap::Arg::new(&file_arg_name); + arg = arg + .long(name) + .alias(name.to_kebab_case()) + .num_args(1) + .value_parser(clap::builder::NonEmptyStringValueParser::new()) + .long_help(spec.doc(name, type_)?); + + file_arg = file_arg + .long(&file_arg_name) + .alias(file_arg_name.to_kebab_case()) + .num_args(1) + .hide(true) + .value_parser(value_parser!(PathBuf)) + .conflicts_with(name); + + if let Some(value_name) = spec.arg_value_name(type_, 0) { + let value_name: &'static str = Box::leak(value_name.into_boxed_str()); + arg = arg.value_name(value_name); + } + + // Set up special-case arg rules + arg = match type_ { + xdr::ScSpecTypeDef::Bool => arg + .num_args(0..1) + .default_missing_value("true") + .default_value("false") + .num_args(0..=1), + xdr::ScSpecTypeDef::Option(_val) => arg.required(false), + xdr::ScSpecTypeDef::I256 + | xdr::ScSpecTypeDef::I128 + | xdr::ScSpecTypeDef::I64 + | xdr::ScSpecTypeDef::I32 => arg.allow_hyphen_values(true), + _ => arg, + }; + 
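+ // Register both flags for this input: the visible `--<name>` value arg and
+ // the hidden `--<name>-file-path` companion. They conflict with each other,
+ // so an argument can come from the command line or from a file, but not both.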
+ cmd = cmd.arg(arg); + cmd = cmd.arg(file_arg); + } + Ok(cmd) +} + +fn fmt_arg_file_name(name: &str) -> String { + format!("{name}-file-path") +} + +fn arg_file_help(docs: &str) -> String { + format!( + r#"{docs} +Usage Notes: +Each arg has a corresponding ---file-path which is a path to a file containing the corresponding JSON argument. +Note: The only types which aren't JSON are Bytes and Bytes which are raw bytes"# + ) +} diff --git a/cmd/soroban-cli/src/commands/contract/mod.rs b/cmd/soroban-cli/src/commands/contract/mod.rs new file mode 100644 index 00000000..35be97a7 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/mod.rs @@ -0,0 +1,158 @@ +pub mod asset; +pub mod bindings; +pub mod build; +pub mod deploy; +pub mod extend; +pub mod fetch; +pub mod id; +pub mod inspect; +pub mod install; +pub mod invoke; +pub mod optimize; +pub mod read; +pub mod restore; + +use crate::commands::global; + +#[derive(Debug, clap::Subcommand)] +pub enum Cmd { + /// Utilities to deploy a Stellar Asset Contract or get its id + #[command(subcommand)] + Asset(asset::Cmd), + /// Generate code client bindings for a contract + #[command(subcommand)] + Bindings(bindings::Cmd), + + Build(build::Cmd), + + /// Extend the time to live ledger of a contract-data ledger entry. + /// + /// If no keys are specified the contract itself is extended. + Extend(extend::Cmd), + + /// Deploy a wasm contract + Deploy(deploy::wasm::Cmd), + + /// Fetch a contract's Wasm binary + Fetch(fetch::Cmd), + + /// Generate the contract id for a given contract or asset + #[command(subcommand)] + Id(id::Cmd), + + /// Inspect a WASM file listing contract functions, meta, etc + Inspect(inspect::Cmd), + + /// Install a WASM file to the ledger without creating a contract instance + Install(install::Cmd), + + /// Invoke a contract function + /// + /// Generates an "implicit CLI" for the specified contract on-the-fly using the contract's + /// schema, which gets embedded into every Soroban contract. The "slop" in this command, + /// everything after the `--`, gets passed to this implicit CLI. Get in-depth help for a given + /// contract: + /// + /// soroban contract invoke ... -- --help + Invoke(invoke::Cmd), + + /// Optimize a WASM file + Optimize(optimize::Cmd), + + /// Print the current value of a contract-data ledger entry + Read(read::Cmd), + + /// Restore an evicted value for a contract-data legder entry. + /// + /// If no keys are specificed the contract itself is restored. 
+ Restore(restore::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Asset(#[from] asset::Error), + + #[error(transparent)] + Bindings(#[from] bindings::Error), + + #[error(transparent)] + Build(#[from] build::Error), + + #[error(transparent)] + Extend(#[from] extend::Error), + + #[error(transparent)] + Deploy(#[from] deploy::wasm::Error), + + #[error(transparent)] + Fetch(#[from] fetch::Error), + #[error(transparent)] + Id(#[from] id::Error), + + #[error(transparent)] + Inspect(#[from] inspect::Error), + + #[error(transparent)] + Install(#[from] install::Error), + + #[error(transparent)] + Invoke(#[from] invoke::Error), + + #[error(transparent)] + Optimize(#[from] optimize::Error), + + #[error(transparent)] + Read(#[from] read::Error), + + #[error(transparent)] + Restore(#[from] restore::Error), +} + +impl Cmd { + pub async fn run(&self, global_args: &global::Args) -> Result<(), Error> { + match &self { + Cmd::Asset(asset) => asset.run().await?, + Cmd::Bindings(bindings) => bindings.run().await?, + Cmd::Build(build) => build.run()?, + Cmd::Extend(extend) => extend.run().await?, + Cmd::Deploy(deploy) => deploy.run().await?, + Cmd::Id(id) => id.run()?, + Cmd::Inspect(inspect) => inspect.run()?, + Cmd::Install(install) => install.run().await?, + Cmd::Invoke(invoke) => invoke.run(global_args).await?, + Cmd::Optimize(optimize) => optimize.run()?, + Cmd::Fetch(fetch) => fetch.run().await?, + Cmd::Read(read) => read.run().await?, + Cmd::Restore(restore) => restore.run().await?, + } + Ok(()) + } +} + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, clap::ValueEnum)] +pub enum Durability { + /// Persistent + Persistent, + /// Temporary + Temporary, +} + +impl From<&Durability> for soroban_env_host::xdr::ContractDataDurability { + fn from(d: &Durability) -> Self { + match d { + Durability::Persistent => soroban_env_host::xdr::ContractDataDurability::Persistent, + Durability::Temporary => soroban_env_host::xdr::ContractDataDurability::Temporary, + } + } +} + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, clap::ValueEnum)] +pub enum SpecOutput { + /// XDR of array of contract spec entries + XdrBase64, + /// Array of xdr of contract spec entries + XdrBase64Array, + /// Pretty print of contract spec entries + Docs, +} diff --git a/cmd/soroban-cli/src/commands/contract/optimize.rs b/cmd/soroban-cli/src/commands/contract/optimize.rs new file mode 100644 index 00000000..751dabb1 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/optimize.rs @@ -0,0 +1,80 @@ +use clap::{arg, command, Parser}; +use std::fmt::Debug; +#[cfg(feature = "opt")] +use wasm_opt::{Feature, OptimizationError, OptimizationOptions}; + +use crate::wasm; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + #[command(flatten)] + wasm: wasm::Args, + /// Path to write the optimized WASM file to (defaults to same location as --wasm with .optimized.wasm suffix) + #[arg(long)] + wasm_out: Option, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Wasm(#[from] wasm::Error), + #[cfg(feature = "opt")] + #[error("optimization error: {0}")] + OptimizationError(OptimizationError), + #[cfg(not(feature = "opt"))] + #[error("Must install with \"opt\" feature, e.g. 
`cargo install soroban-cli --features opt")] + Install, +} + +impl Cmd { + #[cfg(not(feature = "opt"))] + pub fn run(&self) -> Result<(), Error> { + Err(Error::Install) + } + + #[cfg(feature = "opt")] + pub fn run(&self) -> Result<(), Error> { + let wasm_size = self.wasm.len()?; + + println!( + "Reading: {} ({} bytes)", + self.wasm.wasm.to_string_lossy(), + wasm_size + ); + + let wasm_out = self.wasm_out.as_ref().cloned().unwrap_or_else(|| { + let mut wasm_out = self.wasm.wasm.clone(); + wasm_out.set_extension("optimized.wasm"); + wasm_out + }); + println!("Writing to: {}...", wasm_out.to_string_lossy()); + + let mut options = OptimizationOptions::new_optimize_for_size_aggressively(); + options.converge = true; + + // Explicitly set to MVP + sign-ext + mutable-globals, which happens to + // also be the default featureset, but just to be extra clear we set it + // explicitly. + // + // Formerly Soroban supported only the MVP feature set, but Rust 1.70 as + // well as Clang generate code with sign-ext + mutable-globals enabled, + // so Soroban has taken a change to support them also. + options.mvp_features_only(); + options.enable_feature(Feature::MutableGlobals); + options.enable_feature(Feature::SignExt); + + options + .run(&self.wasm.wasm, &wasm_out) + .map_err(Error::OptimizationError)?; + + let wasm_out_size = wasm::len(&wasm_out)?; + println!( + "Optimized: {} ({} bytes)", + wasm_out.to_string_lossy(), + wasm_out_size + ); + + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/read.rs b/cmd/soroban-cli/src/commands/contract/read.rs new file mode 100644 index 00000000..842832d5 --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/read.rs @@ -0,0 +1,180 @@ +use std::{ + fmt::Debug, + io::{self, stdout}, +}; + +use clap::{command, Parser, ValueEnum}; +use soroban_env_host::{ + xdr::{ + ContractDataEntry, Error as XdrError, LedgerEntryData, LedgerKey, LedgerKeyContractData, + ScVal, WriteXdr, + }, + HostError, +}; +use soroban_sdk::xdr::Limits; + +use crate::{ + commands::config, + key, + rpc::{self, Client, FullLedgerEntries, FullLedgerEntry}, +}; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + /// Type of output to generate + #[arg(long, value_enum, default_value("string"))] + pub output: Output, + #[command(flatten)] + pub key: key::Args, + #[command(flatten)] + config: config::Args, +} + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, ValueEnum)] +pub enum Output { + /// String + String, + /// Json + Json, + /// XDR + Xdr, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("parsing key {key}: {error}")] + CannotParseKey { + key: String, + error: soroban_spec_tools::Error, + }, + #[error("parsing XDR key {key}: {error}")] + CannotParseXdrKey { key: String, error: XdrError }, + #[error("cannot parse contract ID {contract_id}: {error}")] + CannotParseContractId { + contract_id: String, + error: stellar_strkey::DecodeError, + }, + #[error("cannot print result {result:?}: {error}")] + CannotPrintResult { + result: ScVal, + error: soroban_spec_tools::Error, + }, + #[error("cannot print result {result:?}: {error}")] + CannotPrintJsonResult { + result: ScVal, + error: serde_json::Error, + }, + #[error("cannot print as csv: {error}")] + CannotPrintAsCsv { error: csv::Error }, + #[error("cannot print: {error}")] + CannotPrintFlush { error: io::Error }, + #[error(transparent)] + Config(#[from] config::Error), + #[error("either `--key` or `--key-xdr` are required when querying a network")] + KeyIsRequired, + #[error(transparent)] + 
Rpc(#[from] rpc::Error), + #[error(transparent)] + Xdr(#[from] XdrError), + #[error(transparent)] + // TODO: the Display impl of host errors is pretty user-unfriendly + // (it just calls Debug). I think we can do better than that + Host(#[from] HostError), + #[error("no matching contract data entries were found for the specified contract id")] + NoContractDataEntryFoundForContractID, + #[error(transparent)] + Key(#[from] key::Error), + #[error("Only contract data and code keys are allowed")] + OnlyDataAllowed, +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + let entries = self.run_against_rpc_server().await?; + self.output_entries(&entries) + } + + async fn run_against_rpc_server(&self) -> Result { + let network = self.config.get_network()?; + tracing::trace!(?network); + let network = &self.config.get_network()?; + let client = Client::new(&network.rpc_url)?; + let keys = self.key.parse_keys()?; + Ok(client.get_full_ledger_entries(&keys).await?) + } + + fn output_entries(&self, entries: &FullLedgerEntries) -> Result<(), Error> { + if entries.entries.is_empty() { + return Err(Error::NoContractDataEntryFoundForContractID); + } + tracing::trace!("{entries:#?}"); + let mut out = csv::Writer::from_writer(stdout()); + for FullLedgerEntry { + key, + val, + live_until_ledger_seq, + last_modified_ledger, + } in &entries.entries + { + let ( + LedgerKey::ContractData(LedgerKeyContractData { key, .. }), + LedgerEntryData::ContractData(ContractDataEntry { val, .. }), + ) = (key, val) + else { + return Err(Error::OnlyDataAllowed); + }; + let output = match self.output { + Output::String => [ + soroban_spec_tools::to_string(key).map_err(|e| Error::CannotPrintResult { + result: key.clone(), + error: e, + })?, + soroban_spec_tools::to_string(val).map_err(|e| Error::CannotPrintResult { + result: val.clone(), + error: e, + })?, + last_modified_ledger.to_string(), + live_until_ledger_seq.to_string(), + ], + Output::Json => [ + serde_json::to_string_pretty(&key).map_err(|error| { + Error::CannotPrintJsonResult { + result: key.clone(), + error, + } + })?, + serde_json::to_string_pretty(&val).map_err(|error| { + Error::CannotPrintJsonResult { + result: val.clone(), + error, + } + })?, + serde_json::to_string_pretty(&last_modified_ledger).map_err(|error| { + Error::CannotPrintJsonResult { + result: val.clone(), + error, + } + })?, + serde_json::to_string_pretty(&live_until_ledger_seq).map_err(|error| { + Error::CannotPrintJsonResult { + result: val.clone(), + error, + } + })?, + ], + Output::Xdr => [ + key.to_xdr_base64(Limits::none())?, + val.to_xdr_base64(Limits::none())?, + last_modified_ledger.to_xdr_base64(Limits::none())?, + live_until_ledger_seq.to_xdr_base64(Limits::none())?, + ], + }; + out.write_record(output) + .map_err(|e| Error::CannotPrintAsCsv { error: e })?; + } + out.flush() + .map_err(|e| Error::CannotPrintFlush { error: e })?; + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/contract/restore.rs b/cmd/soroban-cli/src/commands/contract/restore.rs new file mode 100644 index 00000000..38b8a84a --- /dev/null +++ b/cmd/soroban-cli/src/commands/contract/restore.rs @@ -0,0 +1,206 @@ +use std::{fmt::Debug, path::Path, str::FromStr}; + +use clap::{command, Parser}; +use soroban_env_host::xdr::{ + Error as XdrError, ExtensionPoint, LedgerEntry, LedgerEntryChange, LedgerEntryData, + LedgerFootprint, Memo, MuxedAccount, Operation, OperationBody, OperationMeta, Preconditions, + RestoreFootprintOp, SequenceNumber, SorobanResources, SorobanTransactionData, Transaction, + 
TransactionExt, TransactionMeta, TransactionMetaV3, TtlEntry, Uint256, +}; +use stellar_strkey::DecodeError; + +use crate::{ + commands::{ + config::{self, locator}, + contract::extend, + }, + key, + rpc::{self, Client}, + wasm, Pwd, +}; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + #[command(flatten)] + pub key: key::Args, + /// Number of ledgers to extend the entry + #[arg(long)] + pub ledgers_to_extend: Option, + /// Only print the new Time To Live ledger + #[arg(long)] + pub ttl_ledger_only: bool, + #[command(flatten)] + pub config: config::Args, + #[command(flatten)] + pub fee: crate::fee::Args, +} + +impl FromStr for Cmd { + type Err = clap::error::Error; + + fn from_str(s: &str) -> Result { + use clap::{CommandFactory, FromArgMatches}; + Self::from_arg_matches_mut(&mut Self::command().get_matches_from(s.split_whitespace())) + } +} + +impl Pwd for Cmd { + fn set_pwd(&mut self, pwd: &Path) { + self.config.set_pwd(pwd); + } +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("parsing key {key}: {error}")] + CannotParseKey { + key: String, + error: soroban_spec_tools::Error, + }, + #[error("parsing XDR key {key}: {error}")] + CannotParseXdrKey { key: String, error: XdrError }, + #[error("cannot parse contract ID {0}: {1}")] + CannotParseContractId(String, DecodeError), + #[error(transparent)] + Config(#[from] config::Error), + #[error("either `--key` or `--key-xdr` are required")] + KeyIsRequired, + #[error("xdr processing error: {0}")] + Xdr(#[from] XdrError), + #[error("Ledger entry not found")] + LedgerEntryNotFound, + #[error(transparent)] + Locator(#[from] locator::Error), + #[error("missing operation result")] + MissingOperationResult, + #[error(transparent)] + Rpc(#[from] rpc::Error), + #[error(transparent)] + Wasm(#[from] wasm::Error), + #[error(transparent)] + Key(#[from] key::Error), + #[error(transparent)] + Extend(#[from] extend::Error), +} + +impl Cmd { + #[allow(clippy::too_many_lines)] + pub async fn run(&self) -> Result<(), Error> { + let expiration_ledger_seq = self.run_against_rpc_server().await?; + + if let Some(ledgers_to_extend) = self.ledgers_to_extend { + extend::Cmd { + key: self.key.clone(), + ledgers_to_extend, + config: self.config.clone(), + fee: self.fee.clone(), + ttl_ledger_only: false, + } + .run() + .await?; + } else { + println!("New ttl ledger: {expiration_ledger_seq}"); + } + + Ok(()) + } + + pub async fn run_against_rpc_server(&self) -> Result { + let network = self.config.get_network()?; + tracing::trace!(?network); + let entry_keys = self.key.parse_keys()?; + let network = &self.config.get_network()?; + let client = Client::new(&network.rpc_url)?; + let key = self.config.key_pair()?; + + // Get the account sequence number + let public_strkey = + stellar_strkey::ed25519::PublicKey(key.verifying_key().to_bytes()).to_string(); + let account_details = client.get_account(&public_strkey).await?; + let sequence: i64 = account_details.seq_num.into(); + + let tx = Transaction { + source_account: MuxedAccount::Ed25519(Uint256(key.verifying_key().to_bytes())), + fee: self.fee.fee, + seq_num: SequenceNumber(sequence + 1), + cond: Preconditions::None, + memo: Memo::None, + operations: vec![Operation { + source_account: None, + body: OperationBody::RestoreFootprint(RestoreFootprintOp { + ext: ExtensionPoint::V0, + }), + }] + .try_into()?, + ext: TransactionExt::V1(SorobanTransactionData { + ext: ExtensionPoint::V0, + resources: SorobanResources { + footprint: LedgerFootprint { + read_only: vec![].try_into()?, + 
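+ // The keys to restore are supplied via the read-write footprint; the
+ // read-only footprint is left empty here.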
read_write: entry_keys.try_into()?, + }, + instructions: 0, + read_bytes: 0, + write_bytes: 0, + }, + resource_fee: 0, + }), + }; + + let (result, meta, events) = client + .prepare_and_send_transaction(&tx, &key, &[], &network.network_passphrase, None, None) + .await?; + + tracing::trace!(?result); + tracing::trace!(?meta); + if !events.is_empty() { + tracing::info!("Events:\n {events:#?}"); + } + + // The transaction from core will succeed regardless of whether it actually found & + // restored the entry, so we have to inspect the result meta to tell if it worked or not. + let TransactionMeta::V3(TransactionMetaV3 { operations, .. }) = meta else { + return Err(Error::LedgerEntryNotFound); + }; + tracing::debug!("Operations:\nlen:{}\n{operations:#?}", operations.len()); + + // Simply check if there is exactly one entry here. We only support extending a single + // entry via this command (which we should fix separately, but). + if operations.len() == 0 { + return Err(Error::LedgerEntryNotFound); + } + + if operations.len() != 1 { + tracing::warn!( + "Unexpected number of operations: {}. Currently only handle one.", + operations[0].changes.len() + ); + } + parse_operations(&operations).ok_or(Error::MissingOperationResult) + } +} + +fn parse_operations(ops: &[OperationMeta]) -> Option { + ops.first().and_then(|op| { + op.changes.iter().find_map(|entry| match entry { + LedgerEntryChange::Updated(LedgerEntry { + data: + LedgerEntryData::Ttl(TtlEntry { + live_until_ledger_seq, + .. + }), + .. + }) + | LedgerEntryChange::Created(LedgerEntry { + data: + LedgerEntryData::Ttl(TtlEntry { + live_until_ledger_seq, + .. + }), + .. + }) => Some(*live_until_ledger_seq), + _ => None, + }) + }) +} diff --git a/cmd/soroban-cli/src/commands/events.rs b/cmd/soroban-cli/src/commands/events.rs new file mode 100644 index 00000000..aa46bbe2 --- /dev/null +++ b/cmd/soroban-cli/src/commands/events.rs @@ -0,0 +1,221 @@ +use clap::{arg, command, Parser}; +use std::io; + +use soroban_env_host::xdr::{self, Limits, ReadXdr}; + +use super::{config::locator, network}; +use crate::{rpc, utils}; + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd { + /// The first ledger sequence number in the range to pull events + /// https://developers.stellar.org/docs/encyclopedia/ledger-headers#ledger-sequence + #[arg(long, conflicts_with = "cursor", required_unless_present = "cursor")] + start_ledger: Option, + /// The cursor corresponding to the start of the event range. + #[arg( + long, + conflicts_with = "start_ledger", + required_unless_present = "start_ledger" + )] + cursor: Option, + /// Output formatting options for event stream + #[arg(long, value_enum, default_value = "pretty")] + output: OutputFormat, + /// The maximum number of events to display (defer to the server-defined limit). + #[arg(short, long, default_value = "10")] + count: usize, + /// A set of (up to 5) contract IDs to filter events on. This parameter can + /// be passed multiple times, e.g. `--id C123.. --id C456..`, or passed with + /// multiple parameters, e.g. `--id C123 C456`. + /// + /// Though the specification supports multiple filter objects (i.e. + /// combinations of type, IDs, and topics), only one set can be specified on + /// the command-line today, though that set can have multiple IDs/topics. + #[arg( + long = "id", + num_args = 1..=6, + help_heading = "FILTERS" + )] + contract_ids: Vec, + /// A set of (up to 4) topic filters to filter event topics on. 
A single + /// topic filter can contain 1-4 different segment filters, separated by + /// commas, with an asterisk (* character) indicating a wildcard segment. + /// + /// For example, this is one topic filter with two segments: + /// + /// --topic "AAAABQAAAAdDT1VOVEVSAA==,*" + /// + /// This is two topic filters with one and two segments each: + /// + /// --topic "AAAABQAAAAdDT1VOVEVSAA==" --topic '*,*' + /// + /// Note that all of these topic filters are combined with the contract IDs + /// into a single filter (i.e. combination of type, IDs, and topics). + #[arg( + long = "topic", + num_args = 1..=5, + help_heading = "FILTERS" + )] + topic_filters: Vec, + /// Specifies which type of contract events to display. + #[arg( + long = "type", + value_enum, + default_value = "all", + help_heading = "FILTERS" + )] + event_type: rpc::EventType, + #[command(flatten)] + locator: locator::Args, + #[command(flatten)] + network: network::Args, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("cursor is not valid")] + InvalidCursor, + #[error("filepath does not exist: {path}")] + InvalidFile { path: String }, + #[error("filepath ({path}) cannot be read: {error}")] + CannotReadFile { path: String, error: String }, + #[error("cannot parse topic filter {topic} into 1-4 segments")] + InvalidTopicFilter { topic: String }, + #[error("invalid segment ({segment}) in topic filter ({topic}): {error}")] + InvalidSegment { + topic: String, + segment: String, + error: xdr::Error, + }, + #[error("cannot parse contract ID {contract_id}: {error}")] + InvalidContractId { + contract_id: String, + error: stellar_strkey::DecodeError, + }, + #[error("invalid JSON string: {error} ({debug})")] + InvalidJson { + debug: String, + error: serde_json::Error, + }, + #[error("invalid timestamp in event: {ts}")] + InvalidTimestamp { ts: String }, + #[error("missing start_ledger and cursor")] + MissingStartLedgerAndCursor, + #[error("missing target")] + MissingTarget, + #[error(transparent)] + Rpc(#[from] rpc::Error), + #[error(transparent)] + Generic(#[from] Box), + #[error(transparent)] + Io(#[from] io::Error), + #[error(transparent)] + Xdr(#[from] xdr::Error), + #[error(transparent)] + Serde(#[from] serde_json::Error), + #[error(transparent)] + Network(#[from] network::Error), + #[error(transparent)] + Locator(#[from] locator::Error), +} + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, clap::ValueEnum)] +pub enum OutputFormat { + /// Colorful, human-oriented console output + Pretty, + /// Human-oriented console output without colors + Plain, + /// JSONified console output + Json, +} + +impl Cmd { + pub async fn run(&mut self) -> Result<(), Error> { + // Validate that topics are made up of segments. 
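// Each topic filter is a comma-separated list of segments: a `*` segment is a
// wildcard, and any other segment must decode as a base64-encoded `ScVal`.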
+ for topic in &self.topic_filters { + for (i, segment) in topic.split(',').enumerate() { + if i > 4 { + return Err(Error::InvalidTopicFilter { + topic: topic.to_string(), + }); + } + + if segment != "*" { + if let Err(e) = xdr::ScVal::from_xdr_base64(segment, Limits::none()) { + return Err(Error::InvalidSegment { + topic: topic.to_string(), + segment: segment.to_string(), + error: e, + }); + } + } + } + } + + // Validate contract_ids + for id in &mut self.contract_ids { + utils::contract_id_from_str(id).map_err(|e| Error::InvalidContractId { + contract_id: id.clone(), + error: e, + })?; + } + + let response = self.run_against_rpc_server().await?; + + for event in &response.events { + match self.output { + // Should we pretty-print the JSON like we're doing here or just + // dump an event in raw JSON on each line? The latter is easier + // to consume programmatically. + OutputFormat::Json => { + println!( + "{}", + serde_json::to_string_pretty(&event).map_err(|e| { + Error::InvalidJson { + debug: format!("{event:#?}"), + error: e, + } + })?, + ); + } + OutputFormat::Plain => println!("{event}"), + OutputFormat::Pretty => event.pretty_print()?, + } + } + println!("Latest Ledger: {}", response.latest_ledger); + + Ok(()) + } + + async fn run_against_rpc_server(&self) -> Result { + let start = self.start()?; + let network = self.network.get(&self.locator)?; + + let client = rpc::Client::new(&network.rpc_url)?; + client + .verify_network_passphrase(Some(&network.network_passphrase)) + .await?; + client + .get_events( + start, + Some(self.event_type), + &self.contract_ids, + &self.topic_filters, + Some(self.count), + ) + .await + .map_err(Error::Rpc) + } + + fn start(&self) -> Result { + let start = match (self.start_ledger, self.cursor.clone()) { + (Some(start), _) => rpc::EventStart::Ledger(start), + (_, Some(c)) => rpc::EventStart::Cursor(c), + // should never happen because of required_unless_present flags + _ => return Err(Error::MissingStartLedgerAndCursor), + }; + Ok(start) + } +} diff --git a/cmd/soroban-cli/src/commands/global.rs b/cmd/soroban-cli/src/commands/global.rs new file mode 100644 index 00000000..c606bd1b --- /dev/null +++ b/cmd/soroban-cli/src/commands/global.rs @@ -0,0 +1,61 @@ +use clap::arg; +use std::path::PathBuf; + +use super::config; + +#[derive(Debug, clap::Args, Clone, Default)] +#[group(skip)] +#[allow(clippy::struct_excessive_bools)] +pub struct Args { + #[clap(flatten)] + pub locator: config::locator::Args, + + /// Filter logs output. To turn on "soroban_cli::log::footprint=debug" or off "=off". Can also use env var `RUST_LOG`. + #[arg(long, short = 'f')] + pub filter_logs: Vec, + + /// Do not write logs to stderr including `INFO` + #[arg(long, short = 'q')] + pub quiet: bool, + + /// Log DEBUG events + #[arg(long, short = 'v')] + pub verbose: bool, + + /// Log DEBUG and TRACE events + #[arg(long, visible_alias = "vv")] + pub very_verbose: bool, + + /// List installed plugins. E.g. 
`soroban-hello` + #[arg(long)] + pub list: bool, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("reading file {filepath}: {error}")] + CannotReadLedgerFile { + filepath: PathBuf, + error: soroban_ledger_snapshot::Error, + }, + + #[error("committing file {filepath}: {error}")] + CannotCommitLedgerFile { + filepath: PathBuf, + error: soroban_ledger_snapshot::Error, + }, +} + +impl Args { + pub fn log_level(&self) -> Option { + if self.quiet { + None + } else if self.very_verbose { + Some(tracing::Level::TRACE) + } else if self.verbose { + Some(tracing::Level::DEBUG) + } else { + Some(tracing::Level::INFO) + } + } +} diff --git a/cmd/soroban-cli/src/commands/keys/add.rs b/cmd/soroban-cli/src/commands/keys/add.rs new file mode 100644 index 00000000..2868c737 --- /dev/null +++ b/cmd/soroban-cli/src/commands/keys/add.rs @@ -0,0 +1,33 @@ +use clap::command; + +use super::super::config::{locator, secret}; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Secret(#[from] secret::Error), + + #[error(transparent)] + Config(#[from] locator::Error), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + /// Name of identity + pub name: String, + + #[command(flatten)] + pub secrets: secret::Args, + + #[command(flatten)] + pub config_locator: locator::Args, +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + Ok(self + .config_locator + .write_identity(&self.name, &self.secrets.read_secret()?)?) + } +} diff --git a/cmd/soroban-cli/src/commands/keys/address.rs b/cmd/soroban-cli/src/commands/keys/address.rs new file mode 100644 index 00000000..d13381b4 --- /dev/null +++ b/cmd/soroban-cli/src/commands/keys/address.rs @@ -0,0 +1,54 @@ +use crate::commands::config::secret; + +use super::super::config::locator; +use clap::arg; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Config(#[from] locator::Error), + + #[error(transparent)] + Secret(#[from] secret::Error), + + #[error(transparent)] + StrKey(#[from] stellar_strkey::DecodeError), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + /// Name of identity to lookup, default test identity used if not provided + pub name: String, + + /// If identity is a seed phrase use this hd path, default is 0 + #[arg(long)] + pub hd_path: Option, + + #[command(flatten)] + pub locator: locator::Args, +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + println!("{}", self.public_key()?); + Ok(()) + } + + pub fn private_key(&self) -> Result { + Ok(self + .locator + .read_identity(&self.name)? + .key_pair(self.hd_path)?) + } + + pub fn public_key(&self) -> Result { + if let Ok(key) = stellar_strkey::ed25519::PublicKey::from_string(&self.name) { + Ok(key) + } else { + Ok(stellar_strkey::ed25519::PublicKey::from_payload( + self.private_key()?.verifying_key().as_bytes(), + )?) 
+ } + } +} diff --git a/cmd/soroban-cli/src/commands/keys/fund.rs b/cmd/soroban-cli/src/commands/keys/fund.rs new file mode 100644 index 00000000..b6c088f1 --- /dev/null +++ b/cmd/soroban-cli/src/commands/keys/fund.rs @@ -0,0 +1,34 @@ +use clap::command; + +use crate::commands::network; + +use super::address; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Address(#[from] address::Error), + #[error(transparent)] + Network(#[from] network::Error), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + #[command(flatten)] + pub network: network::Args, + /// Address to fund + #[command(flatten)] + pub address: address::Cmd, +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + let addr = self.address.public_key()?; + self.network + .get(&self.address.locator)? + .fund_address(&addr) + .await?; + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/keys/generate.rs b/cmd/soroban-cli/src/commands/keys/generate.rs new file mode 100644 index 00000000..07782b21 --- /dev/null +++ b/cmd/soroban-cli/src/commands/keys/generate.rs @@ -0,0 +1,75 @@ +use clap::{arg, command}; + +use crate::commands::network; + +use super::super::config::{ + locator, + secret::{self, Secret}, +}; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Config(#[from] locator::Error), + #[error(transparent)] + Secret(#[from] secret::Error), + #[error(transparent)] + Network(#[from] network::Error), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + /// Name of identity + pub name: String, + /// Do not fund address + #[arg(long)] + pub no_fund: bool, + /// Optional seed to use when generating seed phrase. + /// Random otherwise. + #[arg(long, conflicts_with = "default_seed")] + pub seed: Option, + + /// Output the generated identity as a secret key + #[arg(long, short = 's')] + pub as_secret: bool, + + #[command(flatten)] + pub config_locator: locator::Args, + + /// When generating a secret key, which hd_path should be used from the original seed_phrase. + #[arg(long)] + pub hd_path: Option, + + /// Generate the default seed phrase. Useful for testing. 
+ /// Equivalent to --seed 0000000000000000 + #[arg(long, short = 'd', conflicts_with = "seed")] + pub default_seed: bool, + + #[command(flatten)] + pub network: network::Args, +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + let seed_phrase = if self.default_seed { + Secret::test_seed_phrase() + } else { + Secret::from_seed(self.seed.as_deref()) + }?; + let secret = if self.as_secret { + seed_phrase.private_key(self.hd_path)?.into() + } else { + seed_phrase + }; + self.config_locator.write_identity(&self.name, &secret)?; + if !self.no_fund { + let addr = secret.public_key(self.hd_path)?; + let network = self.network.get(&self.config_locator)?; + network.fund_address(&addr).await.unwrap_or_else(|_| { + tracing::warn!("Failed to fund address: {addr} on at {}", network.rpc_url); + }); + } + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/keys/ls.rs b/cmd/soroban-cli/src/commands/keys/ls.rs new file mode 100644 index 00000000..bc46ffcd --- /dev/null +++ b/cmd/soroban-cli/src/commands/keys/ls.rs @@ -0,0 +1,45 @@ +use clap::command; + +use super::super::config::locator; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Config(#[from] locator::Error), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + #[command(flatten)] + pub config_locator: locator::Args, + + #[arg(long, short = 'l')] + pub long: bool, +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + let res = if self.long { self.ls_l() } else { self.ls() }?.join("\n"); + println!("{res}"); + Ok(()) + } + + pub fn ls(&self) -> Result, Error> { + let mut list = self.config_locator.list_identities()?; + let test = "test".to_string(); + if !list.contains(&test) { + list.push(test); + } + Ok(list) + } + + pub fn ls_l(&self) -> Result, Error> { + Ok(self + .config_locator + .list_identities_long()? 
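// Each entry is an (identity name, config file location) pair.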
+ .into_iter() + .map(|(name, location)| format!("{location}\nName: {name}\n")) + .collect::>()) + } +} diff --git a/cmd/soroban-cli/src/commands/keys/mod.rs b/cmd/soroban-cli/src/commands/keys/mod.rs new file mode 100644 index 00000000..42814092 --- /dev/null +++ b/cmd/soroban-cli/src/commands/keys/mod.rs @@ -0,0 +1,63 @@ +use clap::Parser; + +pub mod add; +pub mod address; +pub mod fund; +pub mod generate; +pub mod ls; +pub mod rm; +pub mod show; + +#[derive(Debug, Parser)] +pub enum Cmd { + /// Add a new identity (keypair, ledger, macOS keychain) + Add(add::Cmd), + /// Given an identity return its address (public key) + Address(address::Cmd), + /// Fund an identity on a test network + Fund(fund::Cmd), + /// Generate a new identity with a seed phrase, currently 12 words + Generate(generate::Cmd), + /// List identities + Ls(ls::Cmd), + /// Remove an identity + Rm(rm::Cmd), + /// Given an identity return its private key + Show(show::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Add(#[from] add::Error), + + #[error(transparent)] + Address(#[from] address::Error), + #[error(transparent)] + Fund(#[from] fund::Error), + + #[error(transparent)] + Generate(#[from] generate::Error), + #[error(transparent)] + Rm(#[from] rm::Error), + #[error(transparent)] + Ls(#[from] ls::Error), + + #[error(transparent)] + Show(#[from] show::Error), +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + match self { + Cmd::Add(cmd) => cmd.run()?, + Cmd::Address(cmd) => cmd.run()?, + Cmd::Fund(cmd) => cmd.run().await?, + Cmd::Generate(cmd) => cmd.run().await?, + Cmd::Ls(cmd) => cmd.run()?, + Cmd::Rm(cmd) => cmd.run()?, + Cmd::Show(cmd) => cmd.run()?, + }; + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/keys/rm.rs b/cmd/soroban-cli/src/commands/keys/rm.rs new file mode 100644 index 00000000..df48108d --- /dev/null +++ b/cmd/soroban-cli/src/commands/keys/rm.rs @@ -0,0 +1,25 @@ +use clap::command; + +use super::super::config::locator; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Locator(#[from] locator::Error), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + /// Identity to remove + pub name: String, + + #[command(flatten)] + pub config: locator::Args, +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + Ok(self.config.remove_identity(&self.name)?) + } +} diff --git a/cmd/soroban-cli/src/commands/keys/show.rs b/cmd/soroban-cli/src/commands/keys/show.rs new file mode 100644 index 00000000..b99478cb --- /dev/null +++ b/cmd/soroban-cli/src/commands/keys/show.rs @@ -0,0 +1,43 @@ +use clap::arg; + +use super::super::config::{locator, secret}; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Config(#[from] locator::Error), + + #[error(transparent)] + Secret(#[from] secret::Error), + + #[error(transparent)] + StrKey(#[from] stellar_strkey::DecodeError), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + /// Name of identity to lookup, default is test identity + pub name: String, + + /// If identity is a seed phrase use this hd path, default is 0 + #[arg(long)] + pub hd_path: Option, + + #[command(flatten)] + pub locator: locator::Args, +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + println!("{}", self.private_key()?.to_string()); + Ok(()) + } + + pub fn private_key(&self) -> Result { + Ok(self + .locator + .read_identity(&self.name)? + .private_key(self.hd_path)?) 
+ } +} diff --git a/cmd/soroban-cli/src/commands/lab/mod.rs b/cmd/soroban-cli/src/commands/lab/mod.rs new file mode 100644 index 00000000..f405efe6 --- /dev/null +++ b/cmd/soroban-cli/src/commands/lab/mod.rs @@ -0,0 +1,31 @@ +use clap::Subcommand; +use stellar_xdr::cli as xdr; + +pub mod token; + +#[derive(Debug, Subcommand)] +pub enum Cmd { + /// Wrap, create, and manage token contracts + Token(token::Root), + + /// Decode xdr + Xdr(xdr::Root), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Token(#[from] token::Error), + #[error(transparent)] + Xdr(#[from] xdr::Error), +} + +impl Cmd { + pub async fn run(&self) -> Result<(), Error> { + match &self { + Cmd::Token(token) => token.run().await?, + Cmd::Xdr(xdr) => xdr.run()?, + } + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/lab/token/mod.rs b/cmd/soroban-cli/src/commands/lab/token/mod.rs new file mode 100644 index 00000000..bd7eacf3 --- /dev/null +++ b/cmd/soroban-cli/src/commands/lab/token/mod.rs @@ -0,0 +1,38 @@ +use std::fmt::Debug; + +use crate::commands::contract::{deploy, id}; +use clap::{Parser, Subcommand}; + +#[derive(Parser, Debug)] +pub struct Root { + #[clap(subcommand)] + cmd: Cmd, +} + +#[derive(Subcommand, Debug)] +enum Cmd { + /// Deploy a token contract to wrap an existing Stellar classic asset for smart contract usage + /// Deprecated, use `soroban contract deploy asset` instead + Wrap(deploy::asset::Cmd), + /// Compute the expected contract id for the given asset + /// Deprecated, use `soroban contract id asset` instead + Id(id::asset::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Wrap(#[from] deploy::asset::Error), + #[error(transparent)] + Id(#[from] id::asset::Error), +} + +impl Root { + pub async fn run(&self) -> Result<(), Error> { + match &self.cmd { + Cmd::Wrap(wrap) => wrap.run().await?, + Cmd::Id(id) => id.run()?, + } + Ok(()) + } +} diff --git a/cmd/soroban-cli/src/commands/mod.rs b/cmd/soroban-cli/src/commands/mod.rs new file mode 100644 index 00000000..952869af --- /dev/null +++ b/cmd/soroban-cli/src/commands/mod.rs @@ -0,0 +1,160 @@ +use std::str::FromStr; + +use clap::{command, error::ErrorKind, CommandFactory, FromArgMatches, Parser}; + +pub mod completion; +pub mod config; +pub mod contract; +pub mod events; +pub mod global; +pub mod keys; +pub mod lab; +pub mod network; +pub mod plugin; +pub mod version; + +pub const HEADING_RPC: &str = "Options (RPC)"; +const ABOUT: &str = "Build, deploy, & interact with contracts; set identities to sign with; configure networks; generate keys; and more. + +Intro: https://soroban.stellar.org +CLI Reference: https://github.com/stellar/soroban-tools/tree/main/docs/soroban-cli-full-docs.md"; + +// long_about is shown when someone uses `--help`; short help when using `-h` +const LONG_ABOUT: &str = " + +The easiest way to get started is to generate a new identity: + + soroban config identity generate alice + +You can use identities with the `--source` flag in other commands later. + +Commands that relate to smart contract interactions are organized under the `contract` subcommand. List them: + + soroban contract --help + +A Soroban contract has its interface schema types embedded in the binary that gets deployed on-chain, making it possible to dynamically generate a custom CLI for each. 
`soroban contract invoke` makes use of this: + + soroban contract invoke --id CCR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OTE2 --source alice --network testnet -- \ + --help + +Anything after the `--` double dash (the \"slop\") is parsed as arguments to the contract-specific CLI, generated on-the-fly from the embedded schema. For the hello world example, with a function called `hello` that takes one string argument `to`, here's how you invoke it: + + soroban contract invoke --id CCR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OTE2 --source alice --network testnet -- \ + hello --to world + +Full CLI reference: https://github.com/stellar/soroban-tools/tree/main/docs/soroban-cli-full-docs.md"; + +#[derive(Parser, Debug)] +#[command( + name = "soroban", + about = ABOUT, + version = version::long(), + long_about = ABOUT.to_string() + LONG_ABOUT, + disable_help_subcommand = true, +)] +pub struct Root { + #[clap(flatten)] + pub global_args: global::Args, + + #[command(subcommand)] + pub cmd: Cmd, +} + +impl Root { + pub fn new() -> Result { + Self::try_parse().map_err(|e| { + if std::env::args().any(|s| s == "--list") { + let plugins = plugin::list().unwrap_or_default(); + if plugins.is_empty() { + println!("No Plugins installed. E.g. soroban-hello"); + } else { + println!("Installed Plugins:\n {}", plugins.join("\n ")); + } + std::process::exit(0); + } + match e.kind() { + ErrorKind::InvalidSubcommand => match plugin::run() { + Ok(()) => Error::Clap(e), + Err(e) => Error::Plugin(e), + }, + _ => Error::Clap(e), + } + }) + } + + pub fn from_arg_matches(itr: I) -> Result + where + I: IntoIterator, + T: Into + Clone, + { + Self::from_arg_matches_mut(&mut Self::command().get_matches_from(itr)) + } + pub async fn run(&mut self) -> Result<(), Error> { + match &mut self.cmd { + Cmd::Completion(completion) => completion.run(), + Cmd::Contract(contract) => contract.run(&self.global_args).await?, + Cmd::Events(events) => events.run().await?, + Cmd::Lab(lab) => lab.run().await?, + Cmd::Network(network) => network.run()?, + Cmd::Version(version) => version.run(), + Cmd::Keys(id) => id.run().await?, + Cmd::Config(c) => c.run().await?, + }; + Ok(()) + } +} + +impl FromStr for Root { + type Err = clap::Error; + + fn from_str(s: &str) -> Result { + Self::from_arg_matches(s.split_whitespace()) + } +} + +#[derive(Parser, Debug)] +pub enum Cmd { + /// Print shell completion code for the specified shell. 
+ #[command(long_about = completion::LONG_ABOUT)] + Completion(completion::Cmd), + /// Deprecated, use `soroban keys` and `soroban network` instead + #[command(subcommand)] + Config(config::Cmd), + /// Tools for smart contract developers + #[command(subcommand)] + Contract(contract::Cmd), + /// Watch the network for contract events + Events(events::Cmd), + /// Create and manage identities including keys and addresses + #[command(subcommand)] + Keys(keys::Cmd), + /// Experiment with early features and expert tools + #[command(subcommand)] + Lab(lab::Cmd), + /// Start and configure networks + #[command(subcommand)] + Network(network::Cmd), + /// Print version information + Version(version::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + // TODO: stop using Debug for displaying errors + #[error(transparent)] + Contract(#[from] contract::Error), + #[error(transparent)] + Events(#[from] events::Error), + #[error(transparent)] + Keys(#[from] keys::Error), + #[error(transparent)] + Lab(#[from] lab::Error), + #[error(transparent)] + Config(#[from] config::Error), + #[error(transparent)] + Clap(#[from] clap::error::Error), + #[error(transparent)] + Plugin(#[from] plugin::Error), + #[error(transparent)] + Network(#[from] network::Error), +} diff --git a/cmd/soroban-cli/src/commands/network/add.rs b/cmd/soroban-cli/src/commands/network/add.rs new file mode 100644 index 00000000..b6a2ddd3 --- /dev/null +++ b/cmd/soroban-cli/src/commands/network/add.rs @@ -0,0 +1,32 @@ +use super::super::config::{locator, secret}; +use clap::command; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Secret(#[from] secret::Error), + + #[error(transparent)] + Config(#[from] locator::Error), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + /// Name of network + pub name: String, + + #[command(flatten)] + pub network: super::Network, + + #[command(flatten)] + pub config_locator: locator::Args, +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + Ok(self + .config_locator + .write_network(&self.name, &self.network)?) + } +} diff --git a/cmd/soroban-cli/src/commands/network/ls.rs b/cmd/soroban-cli/src/commands/network/ls.rs new file mode 100644 index 00000000..cc542b3e --- /dev/null +++ b/cmd/soroban-cli/src/commands/network/ls.rs @@ -0,0 +1,44 @@ +use clap::command; + +use super::locator; +use crate::commands::config::locator::Location; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Config(#[from] locator::Error), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + #[command(flatten)] + pub config_locator: locator::Args, + /// Get more info about the networks + #[arg(long, short = 'l')] + pub long: bool, +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + let res = if self.long { self.ls_l() } else { self.ls() }?.join("\n"); + println!("{res}"); + Ok(()) + } + + pub fn ls(&self) -> Result, Error> { + Ok(self.config_locator.list_networks()?) + } + + pub fn ls_l(&self) -> Result, Error> { + Ok(self + .config_locator + .list_networks_long()? + .iter() + .filter_map(|(name, network, location)| { + (!self.config_locator.global || matches!(location, Location::Global(_))) + .then(|| Some(format!("{location}\nName: {name}\n{network:#?}\n")))? 
+ }) + .collect()) + } +} diff --git a/cmd/soroban-cli/src/commands/network/mod.rs b/cmd/soroban-cli/src/commands/network/mod.rs new file mode 100644 index 00000000..22cba190 --- /dev/null +++ b/cmd/soroban-cli/src/commands/network/mod.rs @@ -0,0 +1,197 @@ +use std::str::FromStr; + +use clap::{arg, Parser}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use stellar_strkey::ed25519::PublicKey; + +use crate::{ + commands::HEADING_RPC, + rpc::{self, Client}, +}; + +use super::config::locator; + +pub mod add; +pub mod ls; +pub mod rm; + +#[derive(Debug, Parser)] +pub enum Cmd { + /// Add a new network + Add(add::Cmd), + /// Remove a network + Rm(rm::Cmd), + /// List networks + Ls(ls::Cmd), +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Add(#[from] add::Error), + + #[error(transparent)] + Rm(#[from] rm::Error), + + #[error(transparent)] + Ls(#[from] ls::Error), + + #[error(transparent)] + Config(#[from] locator::Error), + + #[error("network arg or rpc url and network passphrase are required if using the network")] + Network, + #[error(transparent)] + Rpc(#[from] rpc::Error), + #[error(transparent)] + Hyper(#[from] hyper::Error), + #[error("Failed to parse JSON from {0}, {1}")] + FailedToParseJSON(String, serde_json::Error), + #[error("Invalid URL {0}")] + InvalidUrl(String), + #[error("Inproper response {0}")] + InproperResponse(String), + #[error("Currently not supported on windows. Please visit:\n{0}")] + WindowsNotSupported(String), +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + match self { + Cmd::Add(cmd) => cmd.run()?, + Cmd::Rm(new) => new.run()?, + Cmd::Ls(cmd) => cmd.run()?, + }; + Ok(()) + } +} + +#[derive(Debug, clap::Args, Clone, Default)] +#[group(skip)] +pub struct Args { + /// RPC server endpoint + #[arg( + long = "rpc-url", + requires = "network_passphrase", + required_unless_present = "network", + env = "SOROBAN_RPC_URL", + help_heading = HEADING_RPC, + )] + pub rpc_url: Option, + /// Network passphrase to sign the transaction sent to the rpc server + #[arg( + long = "network-passphrase", + requires = "rpc_url", + required_unless_present = "network", + env = "SOROBAN_NETWORK_PASSPHRASE", + help_heading = HEADING_RPC, + )] + pub network_passphrase: Option, + /// Name of network to use from config + #[arg( + long, + required_unless_present = "rpc_url", + env = "SOROBAN_NETWORK", + help_heading = HEADING_RPC, + )] + pub network: Option, +} + +impl Args { + pub fn get(&self, locator: &locator::Args) -> Result { + if let Some(name) = self.network.as_deref() { + if let Ok(network) = locator.read_network(name) { + return Ok(network); + } + } + if let (Some(rpc_url), Some(network_passphrase)) = + (self.rpc_url.clone(), self.network_passphrase.clone()) + { + Ok(Network { + rpc_url, + network_passphrase, + }) + } else { + Err(Error::Network) + } + } +} + +#[derive(Debug, clap::Args, Serialize, Deserialize, Clone)] +#[group(skip)] +pub struct Network { + /// RPC server endpoint + #[arg( + long = "rpc-url", + env = "SOROBAN_RPC_URL", + help_heading = HEADING_RPC, + )] + pub rpc_url: String, + /// Network passphrase to sign the transaction sent to the rpc server + #[arg( + long, + env = "SOROBAN_NETWORK_PASSPHRASE", + help_heading = HEADING_RPC, + )] + pub network_passphrase: String, +} + +impl Network { + pub async fn helper_url(&self, addr: &str) -> Result { + tracing::debug!("address {addr:?}"); + let client = Client::new(&self.rpc_url)?; + let helper_url_root = client.friendbot_url().await?; + let uri = 
http::Uri::from_str(&helper_url_root) + .map_err(|_| Error::InvalidUrl(helper_url_root.to_string()))?; + http::Uri::from_str(&format!("{uri:?}?addr={addr}")) + .map_err(|_| Error::InvalidUrl(helper_url_root.to_string())) + } + + #[allow(clippy::similar_names)] + pub async fn fund_address(&self, addr: &PublicKey) -> Result<(), Error> { + let uri = self.helper_url(&addr.to_string()).await?; + tracing::debug!("URL {uri:?}"); + let response = match uri.scheme_str() { + Some("http") => hyper::Client::new().get(uri.clone()).await?, + Some("https") => { + #[cfg(target_os = "windows")] + { + return Err(Error::WindowsNotSupported(uri.to_string())); + } + #[cfg(not(target_os = "windows"))] + { + let https = hyper_tls::HttpsConnector::new(); + hyper::Client::builder() + .build::<_, hyper::Body>(https) + .get(uri.clone()) + .await? + } + } + _ => { + return Err(Error::InvalidUrl(uri.to_string())); + } + }; + let body = hyper::body::to_bytes(response.into_body()).await?; + let res = serde_json::from_slice::(&body) + .map_err(|e| Error::FailedToParseJSON(uri.to_string(), e))?; + tracing::debug!("{res:#?}"); + if let Some(detail) = res.get("detail").and_then(Value::as_str) { + if detail.contains("createAccountAlreadyExist") { + tracing::warn!("Account already exists"); + } + } else if res.get("successful").is_none() { + return Err(Error::InproperResponse(res.to_string())); + } + Ok(()) + } +} + +impl Network { + pub fn futurenet() -> Self { + Network { + rpc_url: "https://rpc-futurenet.stellar.org:443".to_owned(), + network_passphrase: "Test SDF Future Network ; October 2022".to_owned(), + } + } +} diff --git a/cmd/soroban-cli/src/commands/network/rm.rs b/cmd/soroban-cli/src/commands/network/rm.rs new file mode 100644 index 00000000..7051dc6b --- /dev/null +++ b/cmd/soroban-cli/src/commands/network/rm.rs @@ -0,0 +1,24 @@ +use super::locator; +use clap::command; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Locator(#[from] locator::Error), +} + +#[derive(Debug, clap::Parser, Clone)] +#[group(skip)] +pub struct Cmd { + /// Network to remove + pub name: String, + + #[command(flatten)] + pub config: locator::Args, +} + +impl Cmd { + pub fn run(&self) -> Result<(), Error> { + Ok(self.config.remove_network(&self.name)?) + } +} diff --git a/cmd/soroban-cli/src/commands/plugin.rs b/cmd/soroban-cli/src/commands/plugin.rs new file mode 100644 index 00000000..27c191f0 --- /dev/null +++ b/cmd/soroban-cli/src/commands/plugin.rs @@ -0,0 +1,96 @@ +use std::process::Command; + +use clap::CommandFactory; +use which::which; + +use crate::{utils, Root}; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("Plugin not provided. Should be `soroban plugin` for a binary `soroban-plugin`")] + MissingSubcommand, + #[error(transparent)] + IO(#[from] std::io::Error), + #[error( + r#"error: no such command: `{0}` + + {1}View all installed plugins with `soroban --list`"# + )] + ExecutableNotFound(String, String), + #[error(transparent)] + Which(#[from] which::Error), + #[error(transparent)] + Regex(#[from] regex::Error), +} + +const SUBCOMMAND_TOLERANCE: f64 = 0.75; +const PLUGIN_TOLERANCE: f64 = 0.75; +const MIN_LENGTH: usize = 4; + +/// Tries to run a plugin, if the plugin's name is similar enough to any of the current subcommands return Ok. +/// Otherwise only errors can be returned because this process will exit with the plugin. 
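// For example, running `soroban hello --flag` with no matching built-in
// subcommand looks up a `soroban-hello` binary on PATH, re-runs it with the
// remaining arguments, and exits with the plugin's status code.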
+pub fn run() -> Result<(), Error> { + let (name, args) = { + let mut args = std::env::args().skip(1); + let name = args.next().ok_or(Error::MissingSubcommand)?; + (name, args) + }; + + if Root::command().get_subcommands().any(|c| { + let sc_name = c.get_name(); + sc_name.starts_with(&name) + || (name.len() >= MIN_LENGTH && strsim::jaro(sc_name, &name) >= SUBCOMMAND_TOLERANCE) + }) { + return Ok(()); + } + + let bin = which(format!("soroban-{name}")).map_err(|_| { + let suggestion = if let Ok(bins) = list() { + let suggested_name = bins + .iter() + .map(|b| (b, strsim::jaro_winkler(&name, b))) + .filter(|(_, i)| *i > PLUGIN_TOLERANCE) + .min_by(|a, b| a.1.total_cmp(&b.1)) + .map(|(a, _)| a.to_string()) + .unwrap_or_default(); + if suggested_name.is_empty() { + suggested_name + } else { + format!( + r#"Did you mean `{suggested_name}`? + "# + ) + } + } else { + String::new() + }; + Error::ExecutableNotFound(name, suggestion) + })?; + std::process::exit( + Command::new(bin) + .args(args) + .spawn()? + .wait()? + .code() + .unwrap(), + ); +} + +const MAX_HEX_LENGTH: usize = 10; + +pub fn list() -> Result, Error> { + let re_str = if cfg!(target_os = "windows") { + r"^soroban-.*.exe$" + } else { + r"^soroban-.*" + }; + let re = regex::Regex::new(re_str)?; + Ok(which::which_re(re)? + .filter_map(|b| { + let s = b.file_name()?.to_str()?; + Some(s.strip_suffix(".exe").unwrap_or(s).to_string()) + }) + .filter(|s| !(utils::is_hex_string(s) && s.len() > MAX_HEX_LENGTH)) + .map(|s| s.replace("soroban-", "")) + .collect()) +} diff --git a/cmd/soroban-cli/src/commands/version.rs b/cmd/soroban-cli/src/commands/version.rs new file mode 100644 index 00000000..d9fc091b --- /dev/null +++ b/cmd/soroban-cli/src/commands/version.rs @@ -0,0 +1,32 @@ +use clap::Parser; +use soroban_env_host::meta; +use std::fmt::Debug; + +const GIT_REVISION: &str = env!("GIT_REVISION"); + +#[derive(Parser, Debug, Clone)] +#[group(skip)] +pub struct Cmd; + +impl Cmd { + #[allow(clippy::unused_self)] + pub fn run(&self) { + println!("soroban {}", long()); + } +} + +pub fn long() -> String { + let env = soroban_env_host::VERSION; + let xdr = soroban_env_host::VERSION.xdr; + [ + format!("{} ({GIT_REVISION})", env!("CARGO_PKG_VERSION")), + format!("soroban-env {} ({})", env.pkg, env.rev), + format!("soroban-env interface version {}", meta::INTERFACE_VERSION), + format!( + "stellar-xdr {} ({}) +xdr curr ({})", + xdr.pkg, xdr.rev, xdr.xdr_curr, + ), + ] + .join("\n") +} diff --git a/cmd/soroban-cli/src/fee.rs b/cmd/soroban-cli/src/fee.rs new file mode 100644 index 00000000..ee8b9614 --- /dev/null +++ b/cmd/soroban-cli/src/fee.rs @@ -0,0 +1,16 @@ +use crate::commands::HEADING_RPC; +use clap::arg; + +#[derive(Debug, clap::Args, Clone)] +#[group(skip)] +pub struct Args { + /// fee amount for transaction, in stroops. 
1 stroop = 0.0000001 xlm + #[arg(long, default_value = "100", env = "SOROBAN_FEE", help_heading = HEADING_RPC)] + pub fee: u32, +} + +impl Default for Args { + fn default() -> Self { + Self { fee: 100 } + } +} diff --git a/cmd/soroban-cli/src/key.rs b/cmd/soroban-cli/src/key.rs new file mode 100644 index 00000000..e9901abd --- /dev/null +++ b/cmd/soroban-cli/src/key.rs @@ -0,0 +1,110 @@ +use clap::arg; +use soroban_env_host::xdr::{ + self, LedgerKey, LedgerKeyContractCode, LedgerKeyContractData, Limits, ReadXdr, ScAddress, + ScVal, +}; +use std::path::PathBuf; + +use crate::{ + commands::contract::Durability, + utils::{self}, + wasm, +}; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Spec(#[from] soroban_spec_tools::Error), + #[error(transparent)] + Xdr(#[from] xdr::Error), + #[error("cannot parse contract ID {0}: {1}")] + CannotParseContractId(String, stellar_strkey::DecodeError), + #[error(transparent)] + Wasm(#[from] wasm::Error), +} + +#[derive(Debug, clap::Args, Clone)] +#[group(skip)] +pub struct Args { + /// Contract ID to which owns the data entries. + /// If no keys provided the Contract's instance will be extended + #[arg( + long = "id", + required_unless_present = "wasm", + required_unless_present = "wasm_hash" + )] + pub contract_id: Option, + /// Storage key (symbols only) + #[arg(long = "key", conflicts_with = "key_xdr")] + pub key: Option>, + /// Storage key (base64-encoded XDR) + #[arg(long = "key-xdr", conflicts_with = "key")] + pub key_xdr: Option>, + /// Path to Wasm file of contract code to extend + #[arg( + long, + conflicts_with = "contract_id", + conflicts_with = "key", + conflicts_with = "key_xdr", + conflicts_with = "wasm_hash" + )] + pub wasm: Option, + /// Path to Wasm file of contract code to extend + #[arg( + long, + conflicts_with = "contract_id", + conflicts_with = "key", + conflicts_with = "key_xdr", + conflicts_with = "wasm" + )] + pub wasm_hash: Option, + /// Storage entry durability + #[arg(long, value_enum, required = true)] + pub durability: Durability, +} + +impl Args { + pub fn parse_keys(&self) -> Result, Error> { + let keys = if let Some(keys) = &self.key { + keys.iter() + .map(|key| { + Ok(soroban_spec_tools::from_string_primitive( + key, + &xdr::ScSpecTypeDef::Symbol, + )?) + }) + .collect::, Error>>()? + } else if let Some(keys) = &self.key_xdr { + keys.iter() + .map(|s| Ok(ScVal::from_xdr_base64(s, Limits::none())?)) + .collect::, Error>>()? 
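// The remaining branches build ledger keys directly: a Wasm hash becomes a
// ContractCode key, a Wasm file is converted via `wasm::Args`, and omitting
// the key entirely falls back to the contract instance key.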
+ } else if let Some(wasm) = &self.wasm { + return Ok(vec![crate::wasm::Args { wasm: wasm.clone() }.try_into()?]); + } else if let Some(wasm_hash) = &self.wasm_hash { + return Ok(vec![LedgerKey::ContractCode(LedgerKeyContractCode { + hash: xdr::Hash( + utils::contract_id_from_str(wasm_hash) + .map_err(|e| Error::CannotParseContractId(wasm_hash.clone(), e))?, + ), + })]); + } else { + vec![ScVal::LedgerKeyContractInstance] + }; + let contract_id = contract_id(self.contract_id.as_ref().unwrap())?; + + Ok(keys + .into_iter() + .map(|key| { + LedgerKey::ContractData(LedgerKeyContractData { + contract: ScAddress::Contract(xdr::Hash(contract_id)), + durability: (&self.durability).into(), + key, + }) + }) + .collect()) + } +} + +fn contract_id(s: &str) -> Result<[u8; 32], Error> { + utils::contract_id_from_str(s).map_err(|e| Error::CannotParseContractId(s.to_string(), e)) +} diff --git a/cmd/soroban-cli/src/lib.rs b/cmd/soroban-cli/src/lib.rs new file mode 100644 index 00000000..3aad487c --- /dev/null +++ b/cmd/soroban-cli/src/lib.rs @@ -0,0 +1,53 @@ +#![allow( + clippy::missing_errors_doc, + clippy::must_use_candidate, + clippy::missing_panics_doc +)] +pub mod commands; +pub mod fee; +pub mod key; +pub mod log; +pub mod rpc; +pub mod toid; +pub mod utils; +pub mod wasm; + +use std::path::Path; + +pub use commands::Root; + +pub fn parse_cmd(s: &str) -> Result +where + T: clap::CommandFactory + clap::FromArgMatches, +{ + let input = shlex::split(s).ok_or_else(|| { + clap::Error::raw( + clap::error::ErrorKind::InvalidValue, + format!("Invalid input for command:\n{s}"), + ) + })?; + T::from_arg_matches_mut(&mut T::command().no_binary_name(true).get_matches_from(input)) +} + +pub trait CommandParser { + fn parse(s: &str) -> Result; + + fn parse_arg_vec(s: &[&str]) -> Result; +} + +impl CommandParser for T +where + T: clap::CommandFactory + clap::FromArgMatches, +{ + fn parse(s: &str) -> Result { + parse_cmd(s) + } + + fn parse_arg_vec(args: &[&str]) -> Result { + T::from_arg_matches_mut(&mut T::command().no_binary_name(true).get_matches_from(args)) + } +} + +pub trait Pwd { + fn set_pwd(&mut self, pwd: &Path); +} diff --git a/cmd/soroban-cli/src/log.rs b/cmd/soroban-cli/src/log.rs new file mode 100644 index 00000000..16121982 --- /dev/null +++ b/cmd/soroban-cli/src/log.rs @@ -0,0 +1,13 @@ +pub mod auth; +pub mod budget; +pub mod cost; +pub mod diagnostic_event; +pub mod footprint; +pub mod host_event; + +pub use auth::*; +pub use budget::*; +pub use cost::*; +pub use diagnostic_event::*; +pub use footprint::*; +pub use host_event::*; diff --git a/cmd/soroban-cli/src/log/auth.rs b/cmd/soroban-cli/src/log/auth.rs new file mode 100644 index 00000000..c37e7ed3 --- /dev/null +++ b/cmd/soroban-cli/src/log/auth.rs @@ -0,0 +1,7 @@ +use soroban_env_host::xdr::{SorobanAuthorizationEntry, VecM}; + +pub fn auth(auth: &[VecM]) { + if !auth.is_empty() { + tracing::debug!("{auth:#?}"); + } +} diff --git a/cmd/soroban-cli/src/log/budget.rs b/cmd/soroban-cli/src/log/budget.rs new file mode 100644 index 00000000..59ff4aad --- /dev/null +++ b/cmd/soroban-cli/src/log/budget.rs @@ -0,0 +1,5 @@ +use soroban_env_host::budget::Budget; + +pub fn budget(budget: &Budget) { + tracing::debug!("{budget:#?}"); +} diff --git a/cmd/soroban-cli/src/log/cost.rs b/cmd/soroban-cli/src/log/cost.rs new file mode 100644 index 00000000..3e049a6c --- /dev/null +++ b/cmd/soroban-cli/src/log/cost.rs @@ -0,0 +1,27 @@ +use soroban_env_host::xdr::SorobanResources; +use std::fmt::{Debug, Display}; + +struct Cost<'a>(&'a SorobanResources); + +impl 
Debug for Cost<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + // TODO: Should we output the footprint here? + writeln!(f, "==================== Cost ====================")?; + writeln!(f, "CPU used: {}", self.0.instructions,)?; + writeln!(f, "Bytes read: {}", self.0.read_bytes,)?; + writeln!(f, "Bytes written: {}", self.0.write_bytes,)?; + writeln!(f, "==============================================")?; + Ok(()) + } +} + +impl Display for Cost<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Debug::fmt(self, f) + } +} + +pub fn cost(resources: &SorobanResources) { + let cost = Cost(resources); + tracing::debug!(?cost); +} diff --git a/cmd/soroban-cli/src/log/diagnostic_event.rs b/cmd/soroban-cli/src/log/diagnostic_event.rs new file mode 100644 index 00000000..68af67a4 --- /dev/null +++ b/cmd/soroban-cli/src/log/diagnostic_event.rs @@ -0,0 +1,11 @@ +pub fn diagnostic_events(events: &[impl std::fmt::Debug], level: tracing::Level) { + for (i, event) in events.iter().enumerate() { + if level == tracing::Level::TRACE { + tracing::trace!("{i}: {event:#?}"); + } else if level == tracing::Level::INFO { + tracing::info!("{i}: {event:#?}"); + } else if level == tracing::Level::ERROR { + tracing::error!("{i}: {event:#?}"); + } + } +} diff --git a/cmd/soroban-cli/src/log/footprint.rs b/cmd/soroban-cli/src/log/footprint.rs new file mode 100644 index 00000000..bfbc9f7a --- /dev/null +++ b/cmd/soroban-cli/src/log/footprint.rs @@ -0,0 +1,5 @@ +use soroban_env_host::xdr::LedgerFootprint; + +pub fn footprint(footprint: &LedgerFootprint) { + tracing::debug!("{footprint:#?}"); +} diff --git a/cmd/soroban-cli/src/log/host_event.rs b/cmd/soroban-cli/src/log/host_event.rs new file mode 100644 index 00000000..4238a74c --- /dev/null +++ b/cmd/soroban-cli/src/log/host_event.rs @@ -0,0 +1,7 @@ +use soroban_env_host::events::HostEvent; + +pub fn host_events(events: &[HostEvent]) { + for (i, event) in events.iter().enumerate() { + tracing::info!("{i}: {event:#?}"); + } +} diff --git a/cmd/soroban-cli/src/rpc/fixtures/event_response.json b/cmd/soroban-cli/src/rpc/fixtures/event_response.json new file mode 100644 index 00000000..6f520fdf --- /dev/null +++ b/cmd/soroban-cli/src/rpc/fixtures/event_response.json @@ -0,0 +1,39 @@ +{ + "events": [{ + "eventType": "system", + "ledger": "43601283", + "ledgerClosedAt": "2022-11-16T16:10:41Z", + "contractId": "CDR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OO5Z", + "id": "0164090849041387521-0000000003", + "pagingToken": "164090849041387521-3", + "topic": [ + "AAAABQAAAAh0cmFuc2Zlcg==", + "AAAAAQB6Mcc=" + ], + "value": "AAAABQAAAApHaWJNb255UGxzAAA=" + }, { + "eventType": "contract", + "ledger": "43601284", + "ledgerClosedAt": "2022-11-16T16:10:41Z", + "contractId": "CDR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OO5Z", + "id": "0164090849041387521-0000000003", + "pagingToken": "164090849041387521-3", + "topic": [ + "AAAABQAAAAh0cmFuc2Zlcg==", + "AAAAAQB6Mcc=" + ], + "value": "AAAABQAAAApHaWJNb255UGxzAAA=" + }, { + "eventType": "system", + "ledger": "43601285", + "ledgerClosedAt": "2022-11-16T16:10:41Z", + "contractId": "CCR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OTE2", + "id": "0164090849041387521-0000000003", + "pagingToken": "164090849041387521-3", + "topic": [ + "AAAABQAAAAh0cmFuc2Zlcg==", + "AAAAAQB6Mcc=" + ], + "value": "AAAABQAAAApHaWJNb255UGxzAAA=" + }] +} \ No newline at end of file diff --git a/cmd/soroban-cli/src/rpc/mod.rs b/cmd/soroban-cli/src/rpc/mod.rs new file mode 100644 index 
00000000..53542629 --- /dev/null +++ b/cmd/soroban-cli/src/rpc/mod.rs @@ -0,0 +1,1141 @@ +use http::{uri::Authority, Uri}; +use itertools::Itertools; +use jsonrpsee_core::params::ObjectParams; +use jsonrpsee_core::{self, client::ClientT, rpc_params}; +use jsonrpsee_http_client::{HeaderMap, HttpClient, HttpClientBuilder}; +use serde_aux::prelude::{ + deserialize_default_from_null, deserialize_number_from_string, + deserialize_option_number_from_string, +}; +use soroban_env_host::xdr::{ + self, AccountEntry, AccountId, ContractDataEntry, DiagnosticEvent, Error as XdrError, + LedgerEntryData, LedgerFootprint, LedgerKey, LedgerKeyAccount, Limited, PublicKey, ReadXdr, + SorobanAuthorizationEntry, SorobanResources, SorobanTransactionData, Transaction, + TransactionEnvelope, TransactionMeta, TransactionMetaV3, TransactionResult, Uint256, VecM, + WriteXdr, +}; +use soroban_sdk::token; +use soroban_sdk::xdr::Limits; +use std::{ + fmt::Display, + str::FromStr, + time::{Duration, Instant}, +}; +use termcolor::{Color, ColorChoice, StandardStream, WriteColor}; +use termcolor_output::colored; +use tokio::time::sleep; + +use crate::utils::contract_spec; + +mod txn; + +const VERSION: Option<&str> = option_env!("CARGO_PKG_VERSION"); + +pub type LogEvents = fn( + footprint: &LedgerFootprint, + auth: &[VecM], + events: &[DiagnosticEvent], +) -> (); + +pub type LogResources = fn(resources: &SorobanResources) -> (); + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + InvalidAddress(#[from] stellar_strkey::DecodeError), + #[error("invalid response from server")] + InvalidResponse, + #[error("provided network passphrase {expected:?} does not match the server: {server:?}")] + InvalidNetworkPassphrase { expected: String, server: String }, + #[error("xdr processing error: {0}")] + Xdr(#[from] XdrError), + #[error("invalid rpc url: {0}")] + InvalidRpcUrl(http::uri::InvalidUri), + #[error("invalid rpc url: {0}")] + InvalidRpcUrlFromUriParts(http::uri::InvalidUriParts), + #[error("invalid friendbot url: {0}")] + InvalidUrl(String), + #[error("jsonrpc error: {0}")] + JsonRpc(#[from] jsonrpsee_core::Error), + #[error("json decoding error: {0}")] + Serde(#[from] serde_json::Error), + #[error("transaction failed: {0}")] + TransactionFailed(String), + #[error("transaction submission failed: {0}")] + TransactionSubmissionFailed(String), + #[error("expected transaction status: {0}")] + UnexpectedTransactionStatus(String), + #[error("transaction submission timeout")] + TransactionSubmissionTimeout, + #[error("transaction simulation failed: {0}")] + TransactionSimulationFailed(String), + #[error("{0} not found: {1}")] + NotFound(String, String), + #[error("Missing result in successful response")] + MissingResult, + #[error("Failed to read Error response from server")] + MissingError, + #[error("Missing signing key for account {address}")] + MissingSignerForAddress { address: String }, + #[error("cursor is not valid")] + InvalidCursor, + #[error("unexpected ({length}) simulate transaction result length")] + UnexpectedSimulateTransactionResultSize { length: usize }, + #[error("unexpected ({count}) number of operations")] + UnexpectedOperationCount { count: usize }, + #[error("Transaction contains unsupported operation type")] + UnsupportedOperationType, + #[error("unexpected contract code data type: {0:?}")] + UnexpectedContractCodeDataType(LedgerEntryData), + #[error(transparent)] + CouldNotParseContractSpec(#[from] contract_spec::Error), + #[error("unexpected contract code got token")] + 
UnexpectedToken(ContractDataEntry), + #[error(transparent)] + Spec(#[from] soroban_spec::read::FromWasmError), + #[error(transparent)] + SpecBase64(#[from] soroban_spec::read::ParseSpecBase64Error), + #[error("Fee was too large {0}")] + LargeFee(u64), + #[error("Cannot authorize raw transactions")] + CannotAuthorizeRawTransaction, +} + +#[derive(serde::Deserialize, serde::Serialize, Debug)] +pub struct SendTransactionResponse { + pub hash: String, + pub status: String, + #[serde( + rename = "errorResultXdr", + skip_serializing_if = "Option::is_none", + default + )] + pub error_result_xdr: Option, + #[serde(rename = "latestLedger")] + pub latest_ledger: u32, + #[serde( + rename = "latestLedgerCloseTime", + deserialize_with = "deserialize_number_from_string" + )] + pub latest_ledger_close_time: u32, +} + +#[derive(serde::Deserialize, serde::Serialize, Debug)] +pub struct GetTransactionResponseRaw { + pub status: String, + #[serde( + rename = "envelopeXdr", + skip_serializing_if = "Option::is_none", + default + )] + pub envelope_xdr: Option, + #[serde(rename = "resultXdr", skip_serializing_if = "Option::is_none", default)] + pub result_xdr: Option, + #[serde( + rename = "resultMetaXdr", + skip_serializing_if = "Option::is_none", + default + )] + pub result_meta_xdr: Option, + // TODO: add ledger info and application order +} + +#[derive(serde::Deserialize, serde::Serialize, Debug)] +pub struct GetTransactionResponse { + pub status: String, + pub envelope: Option, + pub result: Option, + pub result_meta: Option, +} + +impl TryInto for GetTransactionResponseRaw { + type Error = xdr::Error; + + fn try_into(self) -> Result { + Ok(GetTransactionResponse { + status: self.status, + envelope: self + .envelope_xdr + .map(|v| ReadXdr::from_xdr_base64(v, Limits::none())) + .transpose()?, + result: self + .result_xdr + .map(|v| ReadXdr::from_xdr_base64(v, Limits::none())) + .transpose()?, + result_meta: self + .result_meta_xdr + .map(|v| ReadXdr::from_xdr_base64(v, Limits::none())) + .transpose()?, + }) + } +} + +#[derive(serde::Deserialize, serde::Serialize, Debug)] +pub struct LedgerEntryResult { + pub key: String, + pub xdr: String, + #[serde(rename = "lastModifiedLedgerSeq")] + pub last_modified_ledger: u32, + #[serde( + rename = "liveUntilLedgerSeq", + skip_serializing_if = "Option::is_none", + deserialize_with = "deserialize_option_number_from_string", + default + )] + pub live_until_ledger_seq_ledger_seq: Option, +} + +#[derive(serde::Deserialize, serde::Serialize, Debug)] +pub struct GetLedgerEntriesResponse { + pub entries: Option>, + #[serde(rename = "latestLedger")] + pub latest_ledger: i64, +} + +#[derive(serde::Deserialize, serde::Serialize, Debug)] +pub struct GetNetworkResponse { + #[serde( + rename = "friendbotUrl", + skip_serializing_if = "Option::is_none", + default + )] + pub friendbot_url: Option, + pub passphrase: String, + #[serde(rename = "protocolVersion")] + pub protocol_version: u32, +} + +#[derive(serde::Deserialize, serde::Serialize, Debug)] +pub struct GetLatestLedgerResponse { + pub id: String, + #[serde(rename = "protocolVersion")] + pub protocol_version: u32, + pub sequence: u32, +} + +#[derive(serde::Deserialize, serde::Serialize, Debug, Default)] +pub struct Cost { + #[serde( + rename = "cpuInsns", + deserialize_with = "deserialize_number_from_string" + )] + pub cpu_insns: u64, + #[serde( + rename = "memBytes", + deserialize_with = "deserialize_number_from_string" + )] + pub mem_bytes: u64, +} + +#[derive(serde::Deserialize, serde::Serialize, Debug)] +pub struct 
SimulateHostFunctionResultRaw { + #[serde(deserialize_with = "deserialize_default_from_null")] + pub auth: Vec, + pub xdr: String, +} + +#[derive(Debug)] +pub struct SimulateHostFunctionResult { + pub auth: Vec, + pub xdr: xdr::ScVal, +} + +#[derive(serde::Deserialize, serde::Serialize, Debug, Default)] +pub struct SimulateTransactionResponse { + #[serde( + rename = "minResourceFee", + deserialize_with = "deserialize_number_from_string", + default + )] + pub min_resource_fee: u64, + #[serde(default)] + pub cost: Cost, + #[serde(skip_serializing_if = "Vec::is_empty", default)] + pub results: Vec, + #[serde(rename = "transactionData", default)] + pub transaction_data: String, + #[serde( + deserialize_with = "deserialize_default_from_null", + skip_serializing_if = "Vec::is_empty", + default + )] + pub events: Vec, + #[serde( + rename = "restorePreamble", + skip_serializing_if = "Option::is_none", + default + )] + pub restore_preamble: Option, + #[serde(rename = "latestLedger")] + pub latest_ledger: u32, + #[serde(skip_serializing_if = "Option::is_none", default)] + pub error: Option, +} + +impl SimulateTransactionResponse { + pub fn results(&self) -> Result, Error> { + self.results + .iter() + .map(|r| { + Ok(SimulateHostFunctionResult { + auth: r + .auth + .iter() + .map(|a| { + Ok(SorobanAuthorizationEntry::from_xdr_base64( + a, + Limits::none(), + )?) + }) + .collect::>()?, + xdr: xdr::ScVal::from_xdr_base64(&r.xdr, Limits::none())?, + }) + }) + .collect() + } + + pub fn events(&self) -> Result, Error> { + self.events + .iter() + .map(|e| Ok(DiagnosticEvent::from_xdr_base64(e, Limits::none())?)) + .collect() + } + + pub fn transaction_data(&self) -> Result { + Ok(SorobanTransactionData::from_xdr_base64( + &self.transaction_data, + Limits::none(), + )?) + } +} + +#[derive(serde::Deserialize, serde::Serialize, Debug, Default)] +pub struct RestorePreamble { + #[serde(rename = "transactionData")] + pub transaction_data: String, + #[serde( + rename = "minResourceFee", + deserialize_with = "deserialize_number_from_string" + )] + pub min_resource_fee: u64, +} + +#[derive(serde::Deserialize, serde::Serialize, Debug)] +pub struct GetEventsResponse { + #[serde(deserialize_with = "deserialize_default_from_null")] + pub events: Vec, + #[serde(rename = "latestLedger")] + pub latest_ledger: u32, +} + +// Determines whether or not a particular filter matches a topic based on the +// same semantics as the RPC server: +// +// - for an exact segment match, the filter is a base64-encoded ScVal +// - for a wildcard, single-segment match, the string "*" matches exactly one +// segment +// +// The expectation is that a `filter` is a comma-separated list of segments that +// has previously been validated, and `topic` is the list of segments applicable +// for this event. 
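// For example (values borrowed from the event_response.json fixture in this
// diff), the filter ["*", "AAAAAQB6Mcc="] matches the two-segment topic
// ["AAAABQAAAAh0cmFuc2Zlcg==", "AAAAAQB6Mcc="], while any filter whose length
// differs from the topic's never matches.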
+// +// [API +// Reference](https://docs.google.com/document/d/1TZUDgo_3zPz7TiPMMHVW_mtogjLyPL0plvzGMsxSz6A/edit#bookmark=id.35t97rnag3tx) +// [Code +// Reference](https://github.com/stellar/soroban-tools/blob/bac1be79e8c2590c9c35ad8a0168aab0ae2b4171/cmd/soroban-rpc/internal/methods/get_events.go#L182-L203) +pub fn does_topic_match(topic: &[String], filter: &[String]) -> bool { + filter.len() == topic.len() + && filter + .iter() + .enumerate() + .all(|(i, s)| *s == "*" || topic[i] == *s) +} + +#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] +pub struct Event { + #[serde(rename = "type")] + pub event_type: String, + + pub ledger: u32, + #[serde(rename = "ledgerClosedAt")] + pub ledger_closed_at: String, + + pub id: String, + #[serde(rename = "pagingToken")] + pub paging_token: String, + + #[serde(rename = "contractId")] + pub contract_id: String, + pub topic: Vec, + pub value: String, +} + +impl Display for Event { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!( + f, + "Event {} [{}]:", + self.paging_token, + self.event_type.to_ascii_uppercase() + )?; + writeln!( + f, + " Ledger: {} (closed at {})", + self.ledger, self.ledger_closed_at + )?; + writeln!(f, " Contract: {}", self.contract_id)?; + writeln!(f, " Topics:")?; + for topic in &self.topic { + let scval = + xdr::ScVal::from_xdr_base64(topic, Limits::none()).map_err(|_| std::fmt::Error)?; + writeln!(f, " {scval:?}")?; + } + let scval = xdr::ScVal::from_xdr_base64(&self.value, Limits::none()) + .map_err(|_| std::fmt::Error)?; + writeln!(f, " Value: {scval:?}") + } +} + +impl Event { + pub fn parse_cursor(&self) -> Result<(u64, i32), Error> { + parse_cursor(&self.id) + } + + pub fn pretty_print(&self) -> Result<(), Box> { + let mut stdout = StandardStream::stdout(ColorChoice::Auto); + if !stdout.supports_color() { + println!("{self}"); + return Ok(()); + } + + let color = match self.event_type.as_str() { + "system" => Color::Yellow, + _ => Color::Blue, + }; + colored!( + stdout, + "{}Event{} {}{}{} [{}{}{}{}]:\n", + bold!(true), + bold!(false), + fg!(Some(Color::Green)), + self.paging_token, + reset!(), + bold!(true), + fg!(Some(color)), + self.event_type.to_ascii_uppercase(), + reset!(), + )?; + + colored!( + stdout, + " Ledger: {}{}{} (closed at {}{}{})\n", + fg!(Some(Color::Green)), + self.ledger, + reset!(), + fg!(Some(Color::Green)), + self.ledger_closed_at, + reset!(), + )?; + + colored!( + stdout, + " Contract: {}{}{}\n", + fg!(Some(Color::Green)), + self.contract_id, + reset!(), + )?; + + colored!(stdout, " Topics:\n")?; + for topic in &self.topic { + let scval = xdr::ScVal::from_xdr_base64(topic, Limits::none())?; + colored!( + stdout, + " {}{:?}{}\n", + fg!(Some(Color::Green)), + scval, + reset!(), + )?; + } + + let scval = xdr::ScVal::from_xdr_base64(&self.value, Limits::none())?; + colored!( + stdout, + " Value: {}{:?}{}\n", + fg!(Some(Color::Green)), + scval, + reset!(), + )?; + + Ok(()) + } +} + +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, clap::ValueEnum)] +pub enum EventType { + All, + Contract, + System, +} + +#[derive(Clone, Debug, Eq, Hash, PartialEq)] +pub enum EventStart { + Ledger(u32), + Cursor(String), +} + +#[derive(Debug)] +pub struct FullLedgerEntry { + pub key: LedgerKey, + pub val: LedgerEntryData, + pub last_modified_ledger: u32, + pub live_until_ledger_seq: u32, +} + +#[derive(Debug)] +pub struct FullLedgerEntries { + pub entries: Vec, + pub latest_ledger: i64, +} + +pub struct Client { + base_url: String, +} + +impl Client { + pub fn new(base_url: 
&str) -> Result { + // Add the port to the base URL if there is no port explicitly included + // in the URL and the scheme allows us to infer a default port. + // Jsonrpsee requires a port to always be present even if one can be + // inferred. This may change: https://github.com/paritytech/jsonrpsee/issues/1048. + let uri = base_url.parse::().map_err(Error::InvalidRpcUrl)?; + let mut parts = uri.into_parts(); + if let (Some(scheme), Some(authority)) = (&parts.scheme, &parts.authority) { + if authority.port().is_none() { + let port = match scheme.as_str() { + "http" => Some(80), + "https" => Some(443), + _ => None, + }; + if let Some(port) = port { + let host = authority.host(); + parts.authority = Some( + Authority::from_str(&format!("{host}:{port}")) + .map_err(Error::InvalidRpcUrl)?, + ); + } + } + } + let uri = Uri::from_parts(parts).map_err(Error::InvalidRpcUrlFromUriParts)?; + tracing::trace!(?uri); + Ok(Self { + base_url: uri.to_string(), + }) + } + + fn client(&self) -> Result { + let url = self.base_url.clone(); + let mut headers = HeaderMap::new(); + headers.insert("X-Client-Name", "soroban-cli".parse().unwrap()); + let version = VERSION.unwrap_or("devel"); + headers.insert("X-Client-Version", version.parse().unwrap()); + Ok(HttpClientBuilder::default() + .set_headers(headers) + .build(url)?) + } + + pub async fn friendbot_url(&self) -> Result { + let network = self.get_network().await?; + tracing::trace!("{network:#?}"); + network.friendbot_url.ok_or_else(|| { + Error::NotFound( + "Friendbot".to_string(), + "Friendbot is not available on this network".to_string(), + ) + }) + } + + pub async fn verify_network_passphrase(&self, expected: Option<&str>) -> Result { + let server = self.get_network().await?.passphrase; + if let Some(expected) = expected { + if expected != server { + return Err(Error::InvalidNetworkPassphrase { + expected: expected.to_string(), + server, + }); + } + } + Ok(server) + } + + pub async fn get_network(&self) -> Result { + tracing::trace!("Getting network"); + Ok(self.client()?.request("getNetwork", rpc_params![]).await?) + } + + pub async fn get_latest_ledger(&self) -> Result { + tracing::trace!("Getting latest ledger"); + Ok(self + .client()? + .request("getLatestLedger", rpc_params![]) + .await?) + } + + pub async fn get_account(&self, address: &str) -> Result { + tracing::trace!("Getting address {}", address); + let key = LedgerKey::Account(LedgerKeyAccount { + account_id: AccountId(PublicKey::PublicKeyTypeEd25519(Uint256( + stellar_strkey::ed25519::PublicKey::from_string(address)?.0, + ))), + }); + let keys = Vec::from([key]); + let response = self.get_ledger_entries(&keys).await?; + let entries = response.entries.unwrap_or_default(); + if entries.is_empty() { + return Err(Error::NotFound( + "Account".to_string(), + format!( + r#"{address} +Might need to fund account like: +soroban config identity fund {address} --network +soroban config identity fund {address} --helper-url "# + ), + )); + } + let ledger_entry = &entries[0]; + let mut read = Limited::new(ledger_entry.xdr.as_bytes(), Limits::none()); + if let LedgerEntryData::Account(entry) = LedgerEntryData::read_xdr_base64(&mut read)? 
{ + tracing::trace!(account=?entry); + Ok(entry) + } else { + Err(Error::InvalidResponse) + } + } + + pub async fn send_transaction( + &self, + tx: &TransactionEnvelope, + ) -> Result<(TransactionResult, TransactionMeta, Vec), Error> { + let client = self.client()?; + tracing::trace!("Sending:\n{tx:#?}"); + let SendTransactionResponse { + hash, + error_result_xdr, + status, + .. + } = client + .request( + "sendTransaction", + rpc_params![tx.to_xdr_base64(Limits::none())?], + ) + .await + .map_err(|err| { + Error::TransactionSubmissionFailed(format!("No status yet:\n {err:#?}")) + })?; + + if status == "ERROR" { + let error = error_result_xdr + .ok_or(Error::MissingError) + .and_then(|x| { + TransactionResult::read_xdr_base64(&mut Limited::new( + x.as_bytes(), + Limits::none(), + )) + .map_err(|_| Error::InvalidResponse) + }) + .map(|r| r.result); + tracing::error!("TXN failed:\n {error:#?}"); + return Err(Error::TransactionSubmissionFailed(format!("{:#?}", error?))); + } + // even if status == "success" we need to query the transaction status in order to get the result + + // Poll the transaction status + let start = Instant::now(); + loop { + let response: GetTransactionResponse = self.get_transaction(&hash).await?.try_into()?; + match response.status.as_str() { + "SUCCESS" => { + // TODO: the caller should probably be printing this + tracing::trace!("{response:#?}"); + let GetTransactionResponse { + result, + result_meta, + .. + } = response; + let meta = result_meta.ok_or(Error::MissingResult)?; + let events = extract_events(&meta); + return Ok((result.ok_or(Error::MissingResult)?, meta, events)); + } + "FAILED" => { + tracing::error!("{response:#?}"); + // TODO: provide a more elaborate error + return Err(Error::TransactionSubmissionFailed(format!( + "{:#?}", + response.result + ))); + } + "NOT_FOUND" => (), + _ => { + return Err(Error::UnexpectedTransactionStatus(response.status)); + } + }; + let duration = start.elapsed(); + // TODO: parameterize the timeout instead of using a magic constant + if duration.as_secs() > 10 { + return Err(Error::TransactionSubmissionTimeout); + } + sleep(Duration::from_secs(1)).await; + } + } + + pub async fn simulate_transaction( + &self, + tx: &TransactionEnvelope, + ) -> Result { + tracing::trace!("Simulating:\n{tx:#?}"); + let base64_tx = tx.to_xdr_base64(Limits::none())?; + let mut builder = ObjectParams::new(); + builder.insert("transaction", base64_tx)?; + let response: SimulateTransactionResponse = self + .client()? + .request("simulateTransaction", builder) + .await?; + tracing::trace!("Simulation response:\n {response:#?}"); + match response.error { + None => Ok(response), + Some(e) => { + crate::log::diagnostic_events(&response.events, tracing::Level::ERROR); + Err(Error::TransactionSimulationFailed(e)) + } + } + } + + pub async fn prepare_and_send_transaction( + &self, + tx_without_preflight: &Transaction, + source_key: &ed25519_dalek::SigningKey, + signers: &[ed25519_dalek::SigningKey], + network_passphrase: &str, + log_events: Option, + log_resources: Option, + ) -> Result<(TransactionResult, TransactionMeta, Vec), Error> { + let txn = txn::Assembled::new(tx_without_preflight, self).await?; + let seq_num = txn.sim_res().latest_ledger + 60; //5 min; + let authorized = txn + .handle_restore(self, source_key, network_passphrase) + .await? 
+ .authorize(self, source_key, signers, seq_num, network_passphrase) + .await?; + authorized.log(log_events, log_resources)?; + let tx = authorized.sign(source_key, network_passphrase)?; + self.send_transaction(&tx).await + } + + pub async fn get_transaction(&self, tx_id: &str) -> Result { + Ok(self + .client()? + .request("getTransaction", rpc_params![tx_id]) + .await?) + } + + pub async fn get_ledger_entries( + &self, + keys: &[LedgerKey], + ) -> Result { + let mut base64_keys: Vec = vec![]; + for k in keys { + let base64_result = k.to_xdr_base64(Limits::none()); + if base64_result.is_err() { + return Err(Error::Xdr(XdrError::Invalid)); + } + base64_keys.push(k.to_xdr_base64(Limits::none()).unwrap()); + } + Ok(self + .client()? + .request("getLedgerEntries", rpc_params![base64_keys]) + .await?) + } + + pub async fn get_full_ledger_entries( + &self, + ledger_keys: &[LedgerKey], + ) -> Result { + let keys = ledger_keys + .iter() + .filter(|key| !matches!(key, LedgerKey::Ttl(_))) + .map(Clone::clone) + .collect::>(); + tracing::trace!("keys: {keys:#?}"); + let GetLedgerEntriesResponse { + entries, + latest_ledger, + } = self.get_ledger_entries(&keys).await?; + tracing::trace!("raw: {entries:#?}"); + let entries = entries + .unwrap_or_default() + .iter() + .map( + |LedgerEntryResult { + key, + xdr, + last_modified_ledger, + live_until_ledger_seq_ledger_seq, + }| { + Ok(FullLedgerEntry { + key: LedgerKey::from_xdr_base64(key, Limits::none())?, + val: LedgerEntryData::from_xdr_base64(xdr, Limits::none())?, + live_until_ledger_seq: live_until_ledger_seq_ledger_seq.unwrap_or_default(), + last_modified_ledger: *last_modified_ledger, + }) + }, + ) + .collect::, Error>>()?; + tracing::trace!("parsed: {entries:#?}"); + Ok(FullLedgerEntries { + entries, + latest_ledger, + }) + } + + pub async fn get_events( + &self, + start: EventStart, + event_type: Option, + contract_ids: &[String], + topics: &[String], + limit: Option, + ) -> Result { + let mut filters = serde_json::Map::new(); + + event_type + .and_then(|t| match t { + EventType::All => None, // all is the default, so avoid incl. the param + EventType::Contract => Some("contract"), + EventType::System => Some("system"), + }) + .map(|t| filters.insert("type".to_string(), t.into())); + + filters.insert("topics".to_string(), topics.into()); + filters.insert("contractIds".to_string(), contract_ids.into()); + + let mut pagination = serde_json::Map::new(); + if let Some(limit) = limit { + pagination.insert("limit".to_string(), limit.into()); + } + + let mut oparams = ObjectParams::new(); + match start { + EventStart::Ledger(l) => oparams.insert("startLedger", l)?, + EventStart::Cursor(c) => { + pagination.insert("cursor".to_string(), c.into()); + } + }; + oparams.insert("filters", vec![filters])?; + oparams.insert("pagination", pagination)?; + + Ok(self.client()?.request("getEvents", oparams).await?) 
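For orientation, the parameter object assembled above ends up looking roughly like the sketch below; the ledger number, contract id, and limit are illustrative placeholders only (the contract id is the example strkey used elsewhere in this file's tests):

```rust
use serde_json::json;

// Illustrative only: mirrors the filters/pagination layout built in `get_events`.
fn example_get_events_params() -> serde_json::Value {
    json!({
        "startLedger": 1000,   // EventStart::Cursor puts a "cursor" into "pagination" instead
        "filters": [{
            "type": "contract", // omitted entirely for EventType::All
            "contractIds": ["CA3D5KRYM6CB7OWQ6TWYRR3Z4T7GNZLKERYNZGGA5SOAOPIFY6YQGAXE"],
            "topics": ["*"]
        }],
        "pagination": { "limit": 10 }
    })
}
```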
+ } + + pub async fn get_contract_data( + &self, + contract_id: &[u8; 32], + ) -> Result { + // Get the contract from the network + let contract_key = LedgerKey::ContractData(xdr::LedgerKeyContractData { + contract: xdr::ScAddress::Contract(xdr::Hash(*contract_id)), + key: xdr::ScVal::LedgerKeyContractInstance, + durability: xdr::ContractDataDurability::Persistent, + }); + let contract_ref = self.get_ledger_entries(&[contract_key]).await?; + let entries = contract_ref.entries.unwrap_or_default(); + if entries.is_empty() { + let contract_address = stellar_strkey::Contract(*contract_id).to_string(); + return Err(Error::NotFound("Contract".to_string(), contract_address)); + } + let contract_ref_entry = &entries[0]; + match LedgerEntryData::from_xdr_base64(&contract_ref_entry.xdr, Limits::none())? { + LedgerEntryData::ContractData(contract_data) => Ok(contract_data), + scval => Err(Error::UnexpectedContractCodeDataType(scval)), + } + } + + pub async fn get_remote_wasm(&self, contract_id: &[u8; 32]) -> Result, Error> { + match self.get_contract_data(contract_id).await? { + xdr::ContractDataEntry { + val: + xdr::ScVal::ContractInstance(xdr::ScContractInstance { + executable: xdr::ContractExecutable::Wasm(hash), + .. + }), + .. + } => self.get_remote_wasm_from_hash(hash).await, + scval => Err(Error::UnexpectedToken(scval)), + } + } + + pub async fn get_remote_wasm_from_hash(&self, hash: xdr::Hash) -> Result, Error> { + let code_key = LedgerKey::ContractCode(xdr::LedgerKeyContractCode { hash: hash.clone() }); + let contract_data = self.get_ledger_entries(&[code_key]).await?; + let entries = contract_data.entries.unwrap_or_default(); + if entries.is_empty() { + return Err(Error::NotFound( + "Contract Code".to_string(), + hex::encode(hash), + )); + } + let contract_data_entry = &entries[0]; + match LedgerEntryData::from_xdr_base64(&contract_data_entry.xdr, Limits::none())? { + LedgerEntryData::ContractCode(xdr::ContractCodeEntry { code, .. }) => Ok(code.into()), + scval => Err(Error::UnexpectedContractCodeDataType(scval)), + } + } + + pub async fn get_remote_contract_spec( + &self, + contract_id: &[u8; 32], + ) -> Result, Error> { + let contract_data = self.get_contract_data(contract_id).await?; + match contract_data.val { + xdr::ScVal::ContractInstance(xdr::ScContractInstance { + executable: xdr::ContractExecutable::Wasm(hash), + .. + }) => Ok(contract_spec::ContractSpec::new( + &self.get_remote_wasm_from_hash(hash).await?, + ) + .map_err(Error::CouldNotParseContractSpec)? + .spec), + xdr::ScVal::ContractInstance(xdr::ScContractInstance { + executable: xdr::ContractExecutable::StellarAsset, + .. + }) => Ok(soroban_spec::read::parse_raw( + &token::StellarAssetSpec::spec_xdr(), + )?), + _ => Err(Error::Xdr(XdrError::Invalid)), + } + } +} + +fn extract_events(tx_meta: &TransactionMeta) -> Vec { + match tx_meta { + TransactionMeta::V3(TransactionMetaV3 { + soroban_meta: Some(meta), + .. 
+ }) => { + // NOTE: we assume there can only be one operation, since we only send one + if meta.diagnostic_events.len() == 1 { + meta.diagnostic_events.clone().into() + } else if meta.events.len() == 1 { + meta.events + .iter() + .map(|e| DiagnosticEvent { + in_successful_contract_call: true, + event: e.clone(), + }) + .collect() + } else { + Vec::new() + } + } + _ => Vec::new(), + } +} + +pub fn parse_cursor(c: &str) -> Result<(u64, i32), Error> { + let (toid_part, event_index) = c.split('-').collect_tuple().ok_or(Error::InvalidCursor)?; + let toid_part: u64 = toid_part.parse().map_err(|_| Error::InvalidCursor)?; + let start_index: i32 = event_index.parse().map_err(|_| Error::InvalidCursor)?; + Ok((toid_part, start_index)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn simulation_transaction_response_parsing() { + let s = r#"{ + "minResourceFee": "100000000", + "cost": { "cpuInsns": "1000", "memBytes": "1000" }, + "transactionData": "", + "latestLedger": 1234 + }"#; + + let resp: SimulateTransactionResponse = serde_json::from_str(s).unwrap(); + assert_eq!(resp.min_resource_fee, 100_000_000); + } + + #[test] + fn simulation_transaction_response_parsing_mostly_empty() { + let s = r#"{ + "latestLedger": 1234 + }"#; + + let resp: SimulateTransactionResponse = serde_json::from_str(s).unwrap(); + assert_eq!(resp.latest_ledger, 1_234); + } + + #[test] + fn test_rpc_url_default_ports() { + // Default ports are added. + let client = Client::new("http://example.com").unwrap(); + assert_eq!(client.base_url, "http://example.com:80/"); + let client = Client::new("https://example.com").unwrap(); + assert_eq!(client.base_url, "https://example.com:443/"); + + // Ports are not added when already present. + let client = Client::new("http://example.com:8080").unwrap(); + assert_eq!(client.base_url, "http://example.com:8080/"); + let client = Client::new("https://example.com:8080").unwrap(); + assert_eq!(client.base_url, "https://example.com:8080/"); + + // Paths are not modified. + let client = Client::new("http://example.com/a/b/c").unwrap(); + assert_eq!(client.base_url, "http://example.com:80/a/b/c"); + let client = Client::new("https://example.com/a/b/c").unwrap(); + assert_eq!(client.base_url, "https://example.com:443/a/b/c"); + let client = Client::new("http://example.com/a/b/c/").unwrap(); + assert_eq!(client.base_url, "http://example.com:80/a/b/c/"); + let client = Client::new("https://example.com/a/b/c/").unwrap(); + assert_eq!(client.base_url, "https://example.com:443/a/b/c/"); + let client = Client::new("http://example.com/a/b:80/c/").unwrap(); + assert_eq!(client.base_url, "http://example.com:80/a/b:80/c/"); + let client = Client::new("https://example.com/a/b:80/c/").unwrap(); + assert_eq!(client.base_url, "https://example.com:443/a/b:80/c/"); + } + + #[test] + // Taken from [RPC server + // tests](https://github.com/stellar/soroban-tools/blob/main/cmd/soroban-rpc/internal/methods/get_events_test.go#L21). + fn test_does_topic_match() { + struct TestCase<'a> { + name: &'a str, + filter: Vec<&'a str>, + includes: Vec>, + excludes: Vec>, + } + + let xfer = "AAAABQAAAAh0cmFuc2Zlcg=="; + let number = "AAAAAQB6Mcc="; + let star = "*"; + + for tc in vec![ + // No filter means match nothing. + TestCase { + name: "", + filter: vec![], + includes: vec![], + excludes: vec![vec![xfer]], + }, + // "*" should match "transfer/" but not "transfer/transfer" or + // "transfer/amount", because * is specified as a SINGLE segment + // wildcard. 
+ TestCase { + name: "*", + filter: vec![star], + includes: vec![vec![xfer]], + excludes: vec![vec![xfer, xfer], vec![xfer, number]], + }, + // "*/transfer" should match anything preceding "transfer", but + // nothing that isn't exactly two segments long. + TestCase { + name: "*/transfer", + filter: vec![star, xfer], + includes: vec![vec![number, xfer], vec![xfer, xfer]], + excludes: vec![ + vec![number], + vec![number, number], + vec![number, xfer, number], + vec![xfer], + vec![xfer, number], + vec![xfer, xfer, xfer], + ], + }, + // The inverse case of before: "transfer/*" should match any single + // segment after a segment that is exactly "transfer", but no + // additional segments. + TestCase { + name: "transfer/*", + filter: vec![xfer, star], + includes: vec![vec![xfer, number], vec![xfer, xfer]], + excludes: vec![ + vec![number], + vec![number, number], + vec![number, xfer, number], + vec![xfer], + vec![number, xfer], + vec![xfer, xfer, xfer], + ], + }, + // Here, we extend to exactly two wild segments after transfer. + TestCase { + name: "transfer/*/*", + filter: vec![xfer, star, star], + includes: vec![vec![xfer, number, number], vec![xfer, xfer, xfer]], + excludes: vec![ + vec![number], + vec![number, number], + vec![number, xfer], + vec![number, xfer, number, number], + vec![xfer], + vec![xfer, xfer, xfer, xfer], + ], + }, + // Here, we ensure wildcards can be in the middle of a filter: only + // exact matches happen on the ends, while the middle can be + // anything. + TestCase { + name: "transfer/*/number", + filter: vec![xfer, star, number], + includes: vec![vec![xfer, number, number], vec![xfer, xfer, number]], + excludes: vec![ + vec![number], + vec![number, number], + vec![number, number, number], + vec![number, xfer, number], + vec![xfer], + vec![number, xfer], + vec![xfer, xfer, xfer], + vec![xfer, number, xfer], + ], + }, + ] { + for topic in tc.includes { + assert!( + does_topic_match( + &topic + .iter() + .map(std::string::ToString::to_string) + .collect::>(), + &tc.filter + .iter() + .map(std::string::ToString::to_string) + .collect::>() + ), + "test: {}, topic ({:?}) should be matched by filter ({:?})", + tc.name, + topic, + tc.filter + ); + } + + for topic in tc.excludes { + assert!( + !does_topic_match( + // make deep copies of the vecs + &topic + .iter() + .map(std::string::ToString::to_string) + .collect::>(), + &tc.filter + .iter() + .map(std::string::ToString::to_string) + .collect::>() + ), + "test: {}, topic ({:?}) should NOT be matched by filter ({:?})", + tc.name, + topic, + tc.filter + ); + } + } + } +} diff --git a/cmd/soroban-cli/src/rpc/txn.rs b/cmd/soroban-cli/src/rpc/txn.rs new file mode 100644 index 00000000..9e36938d --- /dev/null +++ b/cmd/soroban-cli/src/rpc/txn.rs @@ -0,0 +1,610 @@ +use ed25519_dalek::Signer; +use sha2::{Digest, Sha256}; +use soroban_env_host::xdr::{ + self, AccountId, DecoratedSignature, ExtensionPoint, Hash, HashIdPreimage, + HashIdPreimageSorobanAuthorization, InvokeHostFunctionOp, Limits, Memo, Operation, + OperationBody, Preconditions, PublicKey, ReadXdr, RestoreFootprintOp, ScAddress, ScMap, + ScSymbol, ScVal, Signature, SignatureHint, SorobanAddressCredentials, + SorobanAuthorizationEntry, SorobanAuthorizedFunction, SorobanCredentials, SorobanResources, + SorobanTransactionData, Transaction, TransactionEnvelope, TransactionExt, + TransactionSignaturePayload, TransactionSignaturePayloadTaggedTransaction, + TransactionV1Envelope, Uint256, VecM, WriteXdr, +}; + +use crate::rpc::{Client, Error, RestorePreamble, 
SimulateTransactionResponse}; + +use super::{LogEvents, LogResources}; + +pub struct Assembled { + txn: Transaction, + sim_res: SimulateTransactionResponse, +} + +impl Assembled { + pub async fn new(txn: &Transaction, client: &Client) -> Result { + let sim_res = Self::simulate(txn, client).await?; + let txn = assemble(txn, &sim_res)?; + Ok(Self { txn, sim_res }) + } + + pub fn hash(&self, network_passphrase: &str) -> Result<[u8; 32], xdr::Error> { + let signature_payload = TransactionSignaturePayload { + network_id: Hash(Sha256::digest(network_passphrase).into()), + tagged_transaction: TransactionSignaturePayloadTaggedTransaction::Tx(self.txn.clone()), + }; + Ok(Sha256::digest(signature_payload.to_xdr(Limits::none())?).into()) + } + + pub fn sign( + self, + key: &ed25519_dalek::SigningKey, + network_passphrase: &str, + ) -> Result { + let tx = self.txn(); + let tx_hash = self.hash(network_passphrase)?; + let tx_signature = key.sign(&tx_hash); + + let decorated_signature = DecoratedSignature { + hint: SignatureHint(key.verifying_key().to_bytes()[28..].try_into()?), + signature: Signature(tx_signature.to_bytes().try_into()?), + }; + + Ok(TransactionEnvelope::Tx(TransactionV1Envelope { + tx: tx.clone(), + signatures: vec![decorated_signature].try_into()?, + })) + } + + pub async fn simulate( + tx: &Transaction, + client: &Client, + ) -> Result { + client + .simulate_transaction(&TransactionEnvelope::Tx(TransactionV1Envelope { + tx: tx.clone(), + signatures: VecM::default(), + })) + .await + } + + pub async fn handle_restore( + self, + client: &Client, + source_key: &ed25519_dalek::SigningKey, + network_passphrase: &str, + ) -> Result { + if let Some(restore_preamble) = &self.sim_res.restore_preamble { + // Build and submit the restore transaction + client + .send_transaction( + &Assembled::new(&restore(self.txn(), restore_preamble)?, client) + .await? + .sign(source_key, network_passphrase)?, + ) + .await?; + Ok(self.bump_seq_num()) + } else { + Ok(self) + } + } + + pub fn txn(&self) -> &Transaction { + &self.txn + } + + pub fn sim_res(&self) -> &SimulateTransactionResponse { + &self.sim_res + } + + pub async fn authorize( + self, + client: &Client, + source_key: &ed25519_dalek::SigningKey, + signers: &[ed25519_dalek::SigningKey], + seq_num: u32, + network_passphrase: &str, + ) -> Result { + if let Some(txn) = sign_soroban_authorizations( + self.txn(), + source_key, + signers, + seq_num, + network_passphrase, + )? { + Self::new(&txn, client).await + } else { + Ok(self) + } + } + + pub fn bump_seq_num(mut self) -> Self { + self.txn.seq_num.0 += 1; + self + } + + pub fn auth(&self) -> VecM { + self.txn + .operations + .first() + .and_then(|op| match op.body { + OperationBody::InvokeHostFunction(ref body) => (matches!( + body.auth.first().map(|x| &x.root_invocation.function), + Some(&SorobanAuthorizedFunction::ContractFn(_)) + )) + .then_some(body.auth.clone()), + _ => None, + }) + .unwrap_or_default() + } + + pub fn log( + &self, + log_events: Option, + log_resources: Option, + ) -> Result<(), Error> { + if let TransactionExt::V1(SorobanTransactionData { + resources: resources @ SorobanResources { footprint, .. }, + .. + }) = &self.txn.ext + { + if let Some(log) = log_resources { + log(resources); + } + if let Some(log) = log_events { + log(footprint, &[self.auth()], &self.sim_res.events()?); + }; + } + Ok(()) + } +} + +// Apply the result of a simulateTransaction onto a transaction envelope, preparing it for +// submission to the network. 
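Before the body of `assemble` below, a quick worked example of the fee padding it performs, assuming the default classic fee from `crate::fee::Args::default()` is 100; this is the arithmetic behind the `247` asserted in the tests at the end of this file:

```rust
#[test]
fn fee_padding_worked_example() {
    // tx.fee = max(original fee, classic fee + minResourceFee), padded by 15%.
    let original_fee: u64 = 100;     // fee on the transaction before assembly
    let classic_fee: u64 = 100;      // assumed default classic transaction fee
    let min_resource_fee: u64 = 115; // minResourceFee returned by simulateTransaction
    let padded = (original_fee.max(classic_fee + min_resource_fee) * 115) / 100;
    assert_eq!(padded, 247);         // 215 * 115 / 100, rounded down by integer division
}
```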
+pub fn assemble( + raw: &Transaction, + simulation: &SimulateTransactionResponse, +) -> Result { + let mut tx = raw.clone(); + + // Right now simulate.results is one-result-per-function, and assumes there is only one + // operation in the txn, so we need to enforce that here. I (Paul) think that is a bug + // in soroban-rpc.simulateTransaction design, and we should fix it there. + // TODO: We should to better handling so non-soroban txns can be a passthrough here. + if tx.operations.len() != 1 { + return Err(Error::UnexpectedOperationCount { + count: tx.operations.len(), + }); + } + + let transaction_data = simulation.transaction_data()?; + + let mut op = tx.operations[0].clone(); + if let OperationBody::InvokeHostFunction(ref mut body) = &mut op.body { + if body.auth.is_empty() { + if simulation.results.len() != 1 { + return Err(Error::UnexpectedSimulateTransactionResultSize { + length: simulation.results.len(), + }); + } + + let auths = simulation + .results + .iter() + .map(|r| { + VecM::try_from( + r.auth + .iter() + .map(|v| SorobanAuthorizationEntry::from_xdr_base64(v, Limits::none())) + .collect::, _>>()?, + ) + }) + .collect::, _>>()?; + if !auths.is_empty() { + body.auth = auths[0].clone(); + } + } + } + + // update the fees of the actual transaction to meet the minimum resource fees. + let classic_transaction_fees = crate::fee::Args::default().fee; + // Pad the fees up by 15% for a bit of wiggle room. + tx.fee = (tx.fee.max( + classic_transaction_fees + + u32::try_from(simulation.min_resource_fee) + .map_err(|_| Error::LargeFee(simulation.min_resource_fee))?, + ) * 115) + / 100; + + tx.operations = vec![op].try_into()?; + tx.ext = TransactionExt::V1(transaction_data); + Ok(tx) +} + +// Use the given source_key and signers, to sign all SorobanAuthorizationEntry's in the given +// transaction. If unable to sign, return an error. +fn sign_soroban_authorizations( + raw: &Transaction, + source_key: &ed25519_dalek::SigningKey, + signers: &[ed25519_dalek::SigningKey], + signature_expiration_ledger: u32, + network_passphrase: &str, +) -> Result, Error> { + let mut tx = raw.clone(); + let mut op = match tx.operations.as_slice() { + [op @ Operation { + body: OperationBody::InvokeHostFunction(InvokeHostFunctionOp { auth, .. }), + .. + }] if matches!( + auth.first().map(|x| &x.root_invocation.function), + Some(&SorobanAuthorizedFunction::ContractFn(_)) + ) => + { + op.clone() + } + _ => return Ok(None), + }; + + let Operation { + body: OperationBody::InvokeHostFunction(ref mut body), + .. + } = op + else { + return Ok(None); + }; + + let network_id = Hash(Sha256::digest(network_passphrase.as_bytes()).into()); + + let verification_key = source_key.verifying_key(); + let source_address = verification_key.as_bytes(); + + let signed_auths = body + .auth + .as_slice() + .iter() + .map(|raw_auth| { + let mut auth = raw_auth.clone(); + let SorobanAuthorizationEntry { + credentials: SorobanCredentials::Address(ref mut credentials), + .. + } = auth + else { + // Doesn't need special signing + return Ok(auth); + }; + let SorobanAddressCredentials { ref address, .. } = credentials; + + // See if we have a signer for this authorizationEntry + // If not, then we Error + let needle = match address { + ScAddress::Account(AccountId(PublicKey::PublicKeyTypeEd25519(Uint256(ref a)))) => a, + ScAddress::Contract(Hash(c)) => { + // This address is for a contract. This means we're using a custom + // smart-contract account. Currently the CLI doesn't support that yet. 
+ return Err(Error::MissingSignerForAddress { + address: stellar_strkey::Strkey::Contract(stellar_strkey::Contract(*c)) + .to_string(), + }); + } + }; + let signer = if let Some(s) = signers + .iter() + .find(|s| needle == s.verifying_key().as_bytes()) + { + s + } else if needle == source_address { + // This is the source address, so we can sign it + source_key + } else { + // We don't have a signer for this address + return Err(Error::MissingSignerForAddress { + address: stellar_strkey::Strkey::PublicKeyEd25519( + stellar_strkey::ed25519::PublicKey(*needle), + ) + .to_string(), + }); + }; + + sign_soroban_authorization_entry( + raw_auth, + signer, + signature_expiration_ledger, + &network_id, + ) + }) + .collect::, Error>>()?; + + body.auth = signed_auths.try_into()?; + tx.operations = vec![op].try_into()?; + Ok(Some(tx)) +} + +fn sign_soroban_authorization_entry( + raw: &SorobanAuthorizationEntry, + signer: &ed25519_dalek::SigningKey, + signature_expiration_ledger: u32, + network_id: &Hash, +) -> Result { + let mut auth = raw.clone(); + let SorobanAuthorizationEntry { + credentials: SorobanCredentials::Address(ref mut credentials), + .. + } = auth + else { + // Doesn't need special signing + return Ok(auth); + }; + let SorobanAddressCredentials { nonce, .. } = credentials; + + let preimage = HashIdPreimage::SorobanAuthorization(HashIdPreimageSorobanAuthorization { + network_id: network_id.clone(), + invocation: auth.root_invocation.clone(), + nonce: *nonce, + signature_expiration_ledger, + }) + .to_xdr(Limits::none())?; + + let payload = Sha256::digest(preimage); + let signature = signer.sign(&payload); + + let map = ScMap::sorted_from(vec![ + ( + ScVal::Symbol(ScSymbol("public_key".try_into()?)), + ScVal::Bytes( + signer + .verifying_key() + .to_bytes() + .to_vec() + .try_into() + .map_err(Error::Xdr)?, + ), + ), + ( + ScVal::Symbol(ScSymbol("signature".try_into()?)), + ScVal::Bytes( + signature + .to_bytes() + .to_vec() + .try_into() + .map_err(Error::Xdr)?, + ), + ), + ]) + .map_err(Error::Xdr)?; + credentials.signature = ScVal::Vec(Some( + vec![ScVal::Map(Some(map))].try_into().map_err(Error::Xdr)?, + )); + credentials.signature_expiration_ledger = signature_expiration_ledger; + auth.credentials = SorobanCredentials::Address(credentials.clone()); + Ok(auth) +} + +pub fn restore(parent: &Transaction, restore: &RestorePreamble) -> Result { + let transaction_data = + SorobanTransactionData::from_xdr_base64(&restore.transaction_data, Limits::none())?; + let fee = u32::try_from(restore.min_resource_fee) + .map_err(|_| Error::LargeFee(restore.min_resource_fee))?; + Ok(Transaction { + source_account: parent.source_account.clone(), + fee: parent + .fee + .checked_add(fee) + .ok_or(Error::LargeFee(restore.min_resource_fee))?, + seq_num: parent.seq_num.clone(), + cond: Preconditions::None, + memo: Memo::None, + operations: vec![Operation { + source_account: None, + body: OperationBody::RestoreFootprint(RestoreFootprintOp { + ext: ExtensionPoint::V0, + }), + }] + .try_into() + .unwrap(), + ext: TransactionExt::V1(transaction_data), + }) +} + +#[cfg(test)] +mod tests { + use super::*; + + use super::super::SimulateHostFunctionResultRaw; + use soroban_env_host::xdr::{ + self, AccountId, ChangeTrustAsset, ChangeTrustOp, ExtensionPoint, Hash, HostFunction, + InvokeContractArgs, InvokeHostFunctionOp, LedgerFootprint, Memo, MuxedAccount, Operation, + Preconditions, PublicKey, ScAddress, ScSymbol, ScVal, SequenceNumber, + SorobanAuthorizedFunction, SorobanAuthorizedInvocation, SorobanResources, + 
SorobanTransactionData, Uint256, WriteXdr, + }; + use stellar_strkey::ed25519::PublicKey as Ed25519PublicKey; + + const SOURCE: &str = "GBZXN7PIRZGNMHGA7MUUUF4GWPY5AYPV6LY4UV2GL6VJGIQRXFDNMADI"; + + fn transaction_data() -> SorobanTransactionData { + SorobanTransactionData { + resources: SorobanResources { + footprint: LedgerFootprint { + read_only: VecM::default(), + read_write: VecM::default(), + }, + instructions: 0, + read_bytes: 5, + write_bytes: 0, + }, + resource_fee: 0, + ext: ExtensionPoint::V0, + } + } + + fn simulation_response() -> SimulateTransactionResponse { + let source_bytes = Ed25519PublicKey::from_string(SOURCE).unwrap().0; + let fn_auth = &SorobanAuthorizationEntry { + credentials: xdr::SorobanCredentials::Address(xdr::SorobanAddressCredentials { + address: ScAddress::Account(AccountId(PublicKey::PublicKeyTypeEd25519(Uint256( + source_bytes, + )))), + nonce: 0, + signature_expiration_ledger: 0, + signature: ScVal::Void, + }), + root_invocation: SorobanAuthorizedInvocation { + function: SorobanAuthorizedFunction::ContractFn(InvokeContractArgs { + contract_address: ScAddress::Contract(Hash([0; 32])), + function_name: ScSymbol("fn".try_into().unwrap()), + args: VecM::default(), + }), + sub_invocations: VecM::default(), + }, + }; + + SimulateTransactionResponse { + min_resource_fee: 115, + latest_ledger: 3, + results: vec![SimulateHostFunctionResultRaw { + auth: vec![fn_auth.to_xdr_base64(Limits::none()).unwrap()], + xdr: ScVal::U32(0).to_xdr_base64(Limits::none()).unwrap(), + }], + transaction_data: transaction_data().to_xdr_base64(Limits::none()).unwrap(), + ..Default::default() + } + } + + fn single_contract_fn_transaction() -> Transaction { + let source_bytes = Ed25519PublicKey::from_string(SOURCE).unwrap().0; + Transaction { + source_account: MuxedAccount::Ed25519(Uint256(source_bytes)), + fee: 100, + seq_num: SequenceNumber(0), + cond: Preconditions::None, + memo: Memo::None, + operations: vec![Operation { + source_account: None, + body: OperationBody::InvokeHostFunction(InvokeHostFunctionOp { + host_function: HostFunction::InvokeContract(InvokeContractArgs { + contract_address: ScAddress::Contract(Hash([0x0; 32])), + function_name: ScSymbol::default(), + args: VecM::default(), + }), + auth: VecM::default(), + }), + }] + .try_into() + .unwrap(), + ext: TransactionExt::V0, + } + } + + #[test] + fn test_assemble_transaction_updates_tx_data_from_simulation_response() { + let sim = simulation_response(); + let txn = single_contract_fn_transaction(); + let Ok(result) = assemble(&txn, &sim) else { + panic!("assemble failed"); + }; + + // validate it auto updated the tx fees from sim response fees + // since it was greater than tx.fee + assert_eq!(247, result.fee); + + // validate it updated sorobantransactiondata block in the tx ext + assert_eq!(TransactionExt::V1(transaction_data()), result.ext); + } + + #[test] + fn test_assemble_transaction_adds_the_auth_to_the_host_function() { + let sim = simulation_response(); + let txn = single_contract_fn_transaction(); + let Ok(result) = assemble(&txn, &sim) else { + panic!("assemble failed"); + }; + + assert_eq!(1, result.operations.len()); + let OperationBody::InvokeHostFunction(ref op) = result.operations[0].body else { + panic!("unexpected operation type: {:#?}", result.operations[0]); + }; + + assert_eq!(1, op.auth.len()); + let auth = &op.auth[0]; + + let xdr::SorobanAuthorizedFunction::ContractFn(xdr::InvokeContractArgs { + ref function_name, + .. 
+ }) = auth.root_invocation.function + else { + panic!("unexpected function type"); + }; + assert_eq!("fn".to_string(), format!("{}", function_name.0)); + + let xdr::SorobanCredentials::Address(xdr::SorobanAddressCredentials { + address: + xdr::ScAddress::Account(xdr::AccountId(xdr::PublicKey::PublicKeyTypeEd25519(address))), + .. + }) = &auth.credentials + else { + panic!("unexpected credentials type"); + }; + assert_eq!( + SOURCE.to_string(), + stellar_strkey::ed25519::PublicKey(address.0).to_string() + ); + } + + #[test] + fn test_assemble_transaction_errors_for_non_invokehostfn_ops() { + let source_bytes = Ed25519PublicKey::from_string(SOURCE).unwrap().0; + let txn = Transaction { + source_account: MuxedAccount::Ed25519(Uint256(source_bytes)), + fee: 100, + seq_num: SequenceNumber(0), + cond: Preconditions::None, + memo: Memo::None, + operations: vec![Operation { + source_account: None, + body: OperationBody::ChangeTrust(ChangeTrustOp { + line: ChangeTrustAsset::Native, + limit: 0, + }), + }] + .try_into() + .unwrap(), + ext: TransactionExt::V0, + }; + + let result = assemble( + &txn, + &SimulateTransactionResponse { + min_resource_fee: 115, + transaction_data: transaction_data().to_xdr_base64(Limits::none()).unwrap(), + latest_ledger: 3, + ..Default::default() + }, + ); + + match result { + Ok(_) => {} + Err(e) => panic!("expected assembled operation, got: {e:#?}"), + } + } + + #[test] + fn test_assemble_transaction_errors_for_errors_for_mismatched_simulation() { + let txn = single_contract_fn_transaction(); + + let result = assemble( + &txn, + &SimulateTransactionResponse { + min_resource_fee: 115, + transaction_data: transaction_data().to_xdr_base64(Limits::none()).unwrap(), + latest_ledger: 3, + ..Default::default() + }, + ); + + match result { + Err(Error::UnexpectedSimulateTransactionResultSize { length }) => { + assert_eq!(0, length); + } + r => panic!("expected UnexpectedSimulateTransactionResultSize error, got: {r:#?}"), + } + } +} diff --git a/cmd/soroban-cli/src/toid.rs b/cmd/soroban-cli/src/toid.rs new file mode 100644 index 00000000..55c89049 --- /dev/null +++ b/cmd/soroban-cli/src/toid.rs @@ -0,0 +1,69 @@ +/// A barebones implementation of Total Order IDs (TOIDs) from +/// [SEP-35](https://stellar.org/protocol/sep-35), using the reference +/// implementation from the Go +/// [`stellar/go/toid`](https://github.com/stellar/go/blob/b4ba6f8e67f274bf84d21b0effb01ea8a914b766/toid/main.go#L8-L56) +/// package. 
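As a quick illustration of the bit layout encoded by the constants below (32-bit ledger sequence in the high bits, then a 20-bit transaction order, then a 12-bit operation order), here is a sketch of packing a TOID and rendering its 19-digit paging token; the concrete numbers are arbitrary:

```rust
#[test]
fn toid_packing_worked_example() {
    // ledger 1234, transaction order 5, operation order 6
    let toid = Toid::new(1234, 5, 6);
    let packed: u64 = toid.into();

    // ledger occupies the high 32 bits, tx order the next 20, op order the low 12
    assert_eq!(packed, (1234u64 << 32) | (5 << 12) | 6);

    // paging tokens are the packed value zero-padded to 19 digits
    assert_eq!(toid.to_paging_token(), format!("{packed:019}"));
}
```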
+#[derive(Copy, Clone)] +pub struct Toid { + ledger_sequence: u32, + transaction_order: u32, + operation_order: u32, +} + +const LEDGER_MASK: u64 = (1 << 32) - 1; +const TRANSACTION_MASK: u64 = (1 << 20) - 1; +const OPERATION_MASK: u64 = (1 << 12) - 1; +const LEDGER_SHIFT: u64 = 32; +const TRANSACTION_SHIFT: u64 = 12; +const OPERATION_SHIFT: u64 = 0; + +impl Toid { + pub fn new(ledger: u32, tx_order: u32, op_order: u32) -> Toid { + Toid { + ledger_sequence: ledger, + transaction_order: tx_order, + operation_order: op_order, + } + } + + pub fn to_paging_token(self) -> String { + let u: u64 = self.into(); + format!("{u:019}") + } +} + +impl From for Toid { + fn from(item: u64) -> Self { + let ledger: u32 = ((item & LEDGER_MASK) >> LEDGER_SHIFT).try_into().unwrap(); + let tx_order: u32 = ((item & TRANSACTION_MASK) >> TRANSACTION_SHIFT) + .try_into() + .unwrap(); + let op_order: u32 = ((item & OPERATION_MASK) >> OPERATION_SHIFT) + .try_into() + .unwrap(); + + Toid::new(ledger, tx_order, op_order) + } +} + +impl From for u64 { + fn from(item: Toid) -> Self { + let l: u64 = item.ledger_sequence.into(); + let t: u64 = item.transaction_order.into(); + let o: u64 = item.operation_order.into(); + + let mut result: u64 = 0; + result |= (l & LEDGER_MASK) << LEDGER_SHIFT; + result |= (t & TRANSACTION_MASK) << TRANSACTION_SHIFT; + result |= (o & OPERATION_MASK) << OPERATION_SHIFT; + + result + } +} + +impl ToString for Toid { + fn to_string(&self) -> String { + let u: u64 = (*self).into(); + u.to_string() + } +} diff --git a/cmd/soroban-cli/src/utils.rs b/cmd/soroban-cli/src/utils.rs new file mode 100644 index 00000000..ff0018a9 --- /dev/null +++ b/cmd/soroban-cli/src/utils.rs @@ -0,0 +1,244 @@ +use ed25519_dalek::Signer; +use sha2::{Digest, Sha256}; +use stellar_strkey::ed25519::PrivateKey; + +use soroban_env_host::xdr::{ + Asset, ContractIdPreimage, DecoratedSignature, Error as XdrError, Hash, HashIdPreimage, + HashIdPreimageContractId, Limits, Signature, SignatureHint, Transaction, TransactionEnvelope, + TransactionSignaturePayload, TransactionSignaturePayloadTaggedTransaction, + TransactionV1Envelope, WriteXdr, +}; + +pub mod contract_spec; + +/// # Errors +/// +/// Might return an error +pub fn contract_hash(contract: &[u8]) -> Result { + Ok(Hash(Sha256::digest(contract).into())) +} + +/// # Errors +/// +/// Might return an error +pub fn transaction_hash(tx: &Transaction, network_passphrase: &str) -> Result<[u8; 32], XdrError> { + let signature_payload = TransactionSignaturePayload { + network_id: Hash(Sha256::digest(network_passphrase).into()), + tagged_transaction: TransactionSignaturePayloadTaggedTransaction::Tx(tx.clone()), + }; + Ok(Sha256::digest(signature_payload.to_xdr(Limits::none())?).into()) +} + +/// # Errors +/// +/// Might return an error +pub fn sign_transaction( + key: &ed25519_dalek::SigningKey, + tx: &Transaction, + network_passphrase: &str, +) -> Result { + let tx_hash = transaction_hash(tx, network_passphrase)?; + let tx_signature = key.sign(&tx_hash); + + let decorated_signature = DecoratedSignature { + hint: SignatureHint(key.verifying_key().to_bytes()[28..].try_into()?), + signature: Signature(tx_signature.to_bytes().try_into()?), + }; + + Ok(TransactionEnvelope::Tx(TransactionV1Envelope { + tx: tx.clone(), + signatures: vec![decorated_signature].try_into()?, + })) +} + +/// # Errors +/// +/// Might return an error +pub fn contract_id_from_str(contract_id: &str) -> Result<[u8; 32], stellar_strkey::DecodeError> { + stellar_strkey::Contract::from_string(contract_id) + 
.map(|strkey| strkey.0) + .or_else(|_| { + // strkey failed, try to parse it as a hex string, for backwards compatibility. + soroban_spec_tools::utils::padded_hex_from_str(contract_id, 32) + .map_err(|_| stellar_strkey::DecodeError::Invalid)? + .try_into() + .map_err(|_| stellar_strkey::DecodeError::Invalid) + }) + .map_err(|_| stellar_strkey::DecodeError::Invalid) +} + +/// # Errors +/// May not find a config dir +pub fn find_config_dir(mut pwd: std::path::PathBuf) -> std::io::Result { + let soroban_dir = |p: &std::path::Path| p.join(".soroban"); + while !soroban_dir(&pwd).exists() { + if !pwd.pop() { + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "soroban directory not found", + )); + } + } + Ok(soroban_dir(&pwd)) +} + +pub(crate) fn into_signing_key(key: &PrivateKey) -> ed25519_dalek::SigningKey { + let secret: ed25519_dalek::SecretKey = key.0; + ed25519_dalek::SigningKey::from_bytes(&secret) +} + +/// Used in tests +#[allow(unused)] +pub(crate) fn parse_secret_key( + s: &str, +) -> Result { + Ok(into_signing_key(&PrivateKey::from_string(s)?)) +} + +pub fn is_hex_string(s: &str) -> bool { + s.chars().all(|s| s.is_ascii_hexdigit()) +} + +pub fn contract_id_hash_from_asset( + asset: &Asset, + network_passphrase: &str, +) -> Result { + let network_id = Hash(Sha256::digest(network_passphrase.as_bytes()).into()); + let preimage = HashIdPreimage::ContractId(HashIdPreimageContractId { + network_id, + contract_id_preimage: ContractIdPreimage::Asset(asset.clone()), + }); + let preimage_xdr = preimage.to_xdr(Limits::none())?; + Ok(Hash(Sha256::digest(preimage_xdr).into())) +} + +pub mod parsing { + + use regex::Regex; + use soroban_env_host::xdr::{ + AccountId, AlphaNum12, AlphaNum4, Asset, AssetCode12, AssetCode4, PublicKey, + }; + + #[derive(thiserror::Error, Debug)] + pub enum Error { + #[error("invalid asset code: {asset}")] + InvalidAssetCode { asset: String }, + #[error("cannot parse account id: {account_id}")] + CannotParseAccountId { account_id: String }, + #[error("cannot parse asset: {asset}")] + CannotParseAsset { asset: String }, + #[error(transparent)] + Regex(#[from] regex::Error), + } + + pub fn parse_asset(str: &str) -> Result { + if str == "native" { + return Ok(Asset::Native); + } + let split: Vec<&str> = str.splitn(2, ':').collect(); + if split.len() != 2 { + return Err(Error::CannotParseAsset { + asset: str.to_string(), + }); + } + let code = split[0]; + let issuer = split[1]; + let re = Regex::new("^[[:alnum:]]{1,12}$")?; + if !re.is_match(code) { + return Err(Error::InvalidAssetCode { + asset: str.to_string(), + }); + } + if code.len() <= 4 { + let mut asset_code: [u8; 4] = [0; 4]; + for (i, b) in code.as_bytes().iter().enumerate() { + asset_code[i] = *b; + } + Ok(Asset::CreditAlphanum4(AlphaNum4 { + asset_code: AssetCode4(asset_code), + issuer: parse_account_id(issuer)?, + })) + } else { + let mut asset_code: [u8; 12] = [0; 12]; + for (i, b) in code.as_bytes().iter().enumerate() { + asset_code[i] = *b; + } + Ok(Asset::CreditAlphanum12(AlphaNum12 { + asset_code: AssetCode12(asset_code), + issuer: parse_account_id(issuer)?, + })) + } + } + + pub fn parse_account_id(str: &str) -> Result { + let pk_bytes = stellar_strkey::ed25519::PublicKey::from_string(str) + .map_err(|_| Error::CannotParseAccountId { + account_id: str.to_string(), + })? 
+ .0; + Ok(AccountId(PublicKey::PublicKeyTypeEd25519(pk_bytes.into()))) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_contract_id_from_str() { + // strkey + match contract_id_from_str("CA3D5KRYM6CB7OWQ6TWYRR3Z4T7GNZLKERYNZGGA5SOAOPIFY6YQGAXE") { + Ok(contract_id) => assert_eq!( + contract_id, + [ + 0x36, 0x3e, 0xaa, 0x38, 0x67, 0x84, 0x1f, 0xba, 0xd0, 0xf4, 0xed, 0x88, 0xc7, + 0x79, 0xe4, 0xfe, 0x66, 0xe5, 0x6a, 0x24, 0x70, 0xdc, 0x98, 0xc0, 0xec, 0x9c, + 0x07, 0x3d, 0x05, 0xc7, 0xb1, 0x03, + ] + ), + Err(err) => panic!("Failed to parse contract id: {err}"), + } + + // hex + match contract_id_from_str( + "363eaa3867841fbad0f4ed88c779e4fe66e56a2470dc98c0ec9c073d05c7b103", + ) { + Ok(contract_id) => assert_eq!( + contract_id, + [ + 0x36, 0x3e, 0xaa, 0x38, 0x67, 0x84, 0x1f, 0xba, 0xd0, 0xf4, 0xed, 0x88, 0xc7, + 0x79, 0xe4, 0xfe, 0x66, 0xe5, 0x6a, 0x24, 0x70, 0xdc, 0x98, 0xc0, 0xec, 0x9c, + 0x07, 0x3d, 0x05, 0xc7, 0xb1, 0x03, + ] + ), + Err(err) => panic!("Failed to parse contract id: {err}"), + } + + // unpadded-hex + match contract_id_from_str("1") { + Ok(contract_id) => assert_eq!( + contract_id, + [ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, + ] + ), + Err(err) => panic!("Failed to parse contract id: {err}"), + } + + // invalid hex + match contract_id_from_str("foobar") { + Ok(_) => panic!("Expected parsing to fail"), + Err(err) => assert_eq!(err, stellar_strkey::DecodeError::Invalid), + } + + // hex too long (33 bytes) + match contract_id_from_str( + "000000000000000000000000000000000000000000000000000000000000000000", + ) { + Ok(_) => panic!("Expected parsing to fail"), + Err(err) => assert_eq!(err, stellar_strkey::DecodeError::Invalid), + } + } +} diff --git a/cmd/soroban-cli/src/utils/contract_spec.rs b/cmd/soroban-cli/src/utils/contract_spec.rs new file mode 100644 index 00000000..b4f24abe --- /dev/null +++ b/cmd/soroban-cli/src/utils/contract_spec.rs @@ -0,0 +1,276 @@ +use base64::{engine::general_purpose::STANDARD as base64, Engine as _}; +use std::{ + fmt::Display, + io::{self, Cursor}, +}; + +use soroban_env_host::xdr::{ + self, Limited, Limits, ReadXdr, ScEnvMetaEntry, ScMetaEntry, ScMetaV0, ScSpecEntry, + ScSpecFunctionV0, ScSpecUdtEnumV0, ScSpecUdtErrorEnumV0, ScSpecUdtStructV0, ScSpecUdtUnionV0, + StringM, WriteXdr, +}; + +pub struct ContractSpec { + pub env_meta_base64: Option, + pub env_meta: Vec, + pub meta_base64: Option, + pub meta: Vec, + pub spec_base64: Option, + pub spec: Vec, +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("reading file {filepath}: {error}")] + CannotReadContractFile { + filepath: std::path::PathBuf, + error: io::Error, + }, + #[error("cannot parse wasm file {file}: {error}")] + CannotParseWasm { + file: std::path::PathBuf, + error: wasmparser::BinaryReaderError, + }, + #[error("xdr processing error: {0}")] + Xdr(#[from] xdr::Error), + + #[error(transparent)] + Parser(#[from] wasmparser::BinaryReaderError), +} + +impl ContractSpec { + pub fn new(bytes: &[u8]) -> Result { + let mut env_meta: Option<&[u8]> = None; + let mut meta: Option<&[u8]> = None; + let mut spec: Option<&[u8]> = None; + for payload in wasmparser::Parser::new(0).parse_all(bytes) { + let payload = payload?; + if let wasmparser::Payload::CustomSection(section) = payload { + let out = match section.name() { + "contractenvmetav0" => &mut env_meta, + "contractmetav0" => &mut meta, + 
"contractspecv0" => &mut spec, + _ => continue, + }; + *out = Some(section.data()); + }; + } + + let mut env_meta_base64 = None; + let env_meta = if let Some(env_meta) = env_meta { + env_meta_base64 = Some(base64.encode(env_meta)); + let cursor = Cursor::new(env_meta); + let mut read = Limited::new(cursor, Limits::none()); + ScEnvMetaEntry::read_xdr_iter(&mut read).collect::, xdr::Error>>()? + } else { + vec![] + }; + + let mut meta_base64 = None; + let meta = if let Some(meta) = meta { + meta_base64 = Some(base64.encode(meta)); + let cursor = Cursor::new(meta); + let mut depth_limit_read = Limited::new(cursor, Limits::none()); + ScMetaEntry::read_xdr_iter(&mut depth_limit_read) + .collect::, xdr::Error>>()? + } else { + vec![] + }; + + let mut spec_base64 = None; + let spec = if let Some(spec) = spec { + spec_base64 = Some(base64.encode(spec)); + let cursor = Cursor::new(spec); + let mut read = Limited::new(cursor, Limits::none()); + ScSpecEntry::read_xdr_iter(&mut read).collect::, xdr::Error>>()? + } else { + vec![] + }; + + Ok(ContractSpec { + env_meta_base64, + env_meta, + meta_base64, + meta, + spec_base64, + spec, + }) + } + + pub fn spec_as_json_array(&self) -> Result { + let spec = self + .spec + .iter() + .map(|e| Ok(format!("\"{}\"", e.to_xdr_base64(Limits::none())?))) + .collect::, Error>>()? + .join(",\n"); + Ok(format!("[{spec}]")) + } +} + +impl Display for ContractSpec { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Some(env_meta) = &self.env_meta_base64 { + writeln!(f, "Env Meta: {env_meta}")?; + for env_meta_entry in &self.env_meta { + match env_meta_entry { + ScEnvMetaEntry::ScEnvMetaKindInterfaceVersion(v) => { + writeln!(f, " • Interface Version: {v}")?; + } + } + } + writeln!(f)?; + } else { + writeln!(f, "Env Meta: None\n")?; + } + + if let Some(_meta) = &self.meta_base64 { + writeln!(f, "Contract Meta:")?; + for meta_entry in &self.meta { + match meta_entry { + ScMetaEntry::ScMetaV0(ScMetaV0 { key, val }) => { + writeln!(f, " • {key}: {val}")?; + } + } + } + writeln!(f)?; + } else { + writeln!(f, "Contract Meta: None\n")?; + } + + if let Some(_spec_base64) = &self.spec_base64 { + writeln!(f, "Contract Spec:")?; + for spec_entry in &self.spec { + match spec_entry { + ScSpecEntry::FunctionV0(func) => write_func(f, func)?, + ScSpecEntry::UdtUnionV0(udt) => write_union(f, udt)?, + ScSpecEntry::UdtStructV0(udt) => write_struct(f, udt)?, + ScSpecEntry::UdtEnumV0(udt) => write_enum(f, udt)?, + ScSpecEntry::UdtErrorEnumV0(udt) => write_error(f, udt)?, + } + } + } else { + writeln!(f, "Contract Spec: None")?; + } + Ok(()) + } +} + +fn write_func(f: &mut std::fmt::Formatter<'_>, func: &ScSpecFunctionV0) -> std::fmt::Result { + writeln!(f, " • Function: {}", func.name.to_utf8_string_lossy())?; + if func.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + &indent(&func.doc.to_utf8_string_lossy(), 11).trim() + )?; + } + writeln!( + f, + " Inputs: {}", + indent(&format!("{:#?}", func.inputs), 5).trim() + )?; + writeln!( + f, + " Output: {}", + indent(&format!("{:#?}", func.outputs), 5).trim() + )?; + writeln!(f)?; + Ok(()) +} + +fn write_union(f: &mut std::fmt::Formatter<'_>, udt: &ScSpecUdtUnionV0) -> std::fmt::Result { + writeln!(f, " • Union: {}", format_name(&udt.lib, &udt.name))?; + if udt.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + indent(&udt.doc.to_utf8_string_lossy(), 10).trim() + )?; + } + writeln!(f, " Cases:")?; + for case in udt.cases.iter() { + writeln!(f, " • {}", indent(&format!("{case:#?}"), 8).trim())?; + } + writeln!(f)?; + 
Ok(()) +} + +fn write_struct(f: &mut std::fmt::Formatter<'_>, udt: &ScSpecUdtStructV0) -> std::fmt::Result { + writeln!(f, " • Struct: {}", format_name(&udt.lib, &udt.name))?; + if udt.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + indent(&udt.doc.to_utf8_string_lossy(), 10).trim() + )?; + } + writeln!(f, " Fields:")?; + for field in udt.fields.iter() { + writeln!( + f, + " • {}: {}", + field.name.to_utf8_string_lossy(), + indent(&format!("{:#?}", field.type_), 8).trim() + )?; + if field.doc.len() > 0 { + writeln!(f, "{}", indent(&format!("{:#?}", field.doc), 8))?; + } + } + writeln!(f)?; + Ok(()) +} + +fn write_enum(f: &mut std::fmt::Formatter<'_>, udt: &ScSpecUdtEnumV0) -> std::fmt::Result { + writeln!(f, " • Enum: {}", format_name(&udt.lib, &udt.name))?; + if udt.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + indent(&udt.doc.to_utf8_string_lossy(), 10).trim() + )?; + } + writeln!(f, " Cases:")?; + for case in udt.cases.iter() { + writeln!(f, " • {}", indent(&format!("{case:#?}"), 8).trim())?; + } + writeln!(f)?; + Ok(()) +} + +fn write_error(f: &mut std::fmt::Formatter<'_>, udt: &ScSpecUdtErrorEnumV0) -> std::fmt::Result { + writeln!(f, " • Error: {}", format_name(&udt.lib, &udt.name))?; + if udt.doc.len() > 0 { + writeln!( + f, + " Docs: {}", + indent(&udt.doc.to_utf8_string_lossy(), 10).trim() + )?; + } + writeln!(f, " Cases:")?; + for case in udt.cases.iter() { + writeln!(f, " • {}", indent(&format!("{case:#?}"), 8).trim())?; + } + writeln!(f)?; + Ok(()) +} + +fn indent(s: &str, n: usize) -> String { + let pad = " ".repeat(n); + s.lines() + .map(|line| format!("{pad}{line}")) + .collect::>() + .join("\n") +} + +fn format_name(lib: &StringM<80>, name: &StringM<60>) -> String { + if lib.len() > 0 { + format!( + "{}::{}", + lib.to_utf8_string_lossy(), + name.to_utf8_string_lossy() + ) + } else { + name.to_utf8_string_lossy() + } +} diff --git a/cmd/soroban-cli/src/wasm.rs b/cmd/soroban-cli/src/wasm.rs new file mode 100644 index 00000000..fce44c7c --- /dev/null +++ b/cmd/soroban-cli/src/wasm.rs @@ -0,0 +1,93 @@ +use clap::arg; +use soroban_env_host::xdr::{self, LedgerKey, LedgerKeyContractCode}; +use std::{ + fs, io, + path::{Path, PathBuf}, +}; + +use crate::utils::{self, contract_spec::ContractSpec}; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("reading file {filepath}: {error}")] + CannotReadContractFile { + filepath: std::path::PathBuf, + error: io::Error, + }, + #[error("cannot parse wasm file {file}: {error}")] + CannotParseWasm { + file: std::path::PathBuf, + error: wasmparser::BinaryReaderError, + }, + #[error("xdr processing error: {0}")] + Xdr(#[from] xdr::Error), + + #[error(transparent)] + Parser(#[from] wasmparser::BinaryReaderError), + #[error(transparent)] + ContractSpec(#[from] crate::utils::contract_spec::Error), +} + +#[derive(Debug, clap::Args, Clone)] +#[group(skip)] +pub struct Args { + /// Path to wasm binary + #[arg(long)] + pub wasm: PathBuf, +} + +impl Args { + /// # Errors + /// May fail to read wasm file + pub fn read(&self) -> Result, Error> { + fs::read(&self.wasm).map_err(|e| Error::CannotReadContractFile { + filepath: self.wasm.clone(), + error: e, + }) + } + + /// # Errors + /// May fail to read wasm file + pub fn len(&self) -> Result { + len(&self.wasm) + } + + /// # Errors + /// May fail to read wasm file + pub fn is_empty(&self) -> Result { + self.len().map(|len| len == 0) + } + + /// # Errors + /// May fail to read wasm file or parse xdr section + pub fn parse(&self) -> Result { + let contents = self.read()?; + 
Ok(ContractSpec::new(&contents)?) + } +} + +impl From<&PathBuf> for Args { + fn from(wasm: &PathBuf) -> Self { + Self { wasm: wasm.clone() } + } +} + +impl TryInto for Args { + type Error = Error; + fn try_into(self) -> Result { + Ok(LedgerKey::ContractCode(LedgerKeyContractCode { + hash: utils::contract_hash(&self.read()?)?, + })) + } +} + +/// # Errors +/// May fail to read wasm file +pub fn len(p: &Path) -> Result { + Ok(std::fs::metadata(p) + .map_err(|e| Error::CannotReadContractFile { + filepath: p.to_path_buf(), + error: e, + })? + .len()) +} diff --git a/cmd/soroban-rpc/README.md b/cmd/soroban-rpc/README.md new file mode 100644 index 00000000..da2baf4e --- /dev/null +++ b/cmd/soroban-rpc/README.md @@ -0,0 +1,58 @@ +# Soroban-RPC + +Soroban-RPC allows you to communicate directly with Soroban via a JSON RPC interface. + +For example, you can build an application and have it send a transaction, get ledger and event data or simulate transactions. + +## Dependencies + - [Git](https://git-scm.com/downloads) + - [Go](https://golang.org/doc/install) + - [Rust](https://www.rust-lang.org/tools/install) + - [Cargo](https://doc.rust-lang.org/cargo/getting-started/installation.html) + +## Building Stellar-Core +Soroban-RPC requires an instance of stellar-core binary on the same host. This is referred to as the `Captive Core`. +Since, we are building RPC from source, we recommend considering two approaches to get the stellar-core binary: +- If saving time is top priority and your development machine is on a linux debian OS, then consider installing the +testnet release candidates from the [testing repository.](https://apt.stellar.org/pool/unstable/s/stellar-core/) +- The recommended option is to compile the core source directly on your machine: + - Clone the stellar-core repo: + ```bash + git clone https://github.com/stellar/stellar-core.git + cd stellar-core + ``` + - Fetch the tags and checkout the testnet release tag: + ```bash + git fetch --tags + git checkout tags/v20.0.0-rc.2.1 -b soroban-testnet-release + ``` + - Follow the build steps listed in [INSTALL.md](https://github.com/stellar/stellar-core/blob/master/INSTALL.md) file for the instructions on building the local binary + +## Building Soroban-RPC +- Similar to stellar-core, we will clone the soroban-tools repo and checkout the testnet release tag: +```bash +git clone https://github.com/stellar/soroban-tools.git +cd soroban-tools +git fetch --tags +git checkout tags/v20.0.0-rc4 -b soroban-testnet-release +``` +- Build soroban-rpc target: +```bash +make build-soroban-rpc +``` +This will install and build the required dependencies and generate a `soroban-rpc` binary in the working directory. + +## Configuring and Running RPC Server +- Both stellar-core and soroban-rpc require configuration files to run. + - For production, we specifically recommend running Soroban RPC with a TOML configuration file rather than CLI flags. + - There is a new subcommand `gen-config-file` which takes all the same arguments as the root command (or no arguments at all), + and outputs the resulting config toml file to stdout. + ```bash + ./soroban-rpc gen-config-file + ``` + - Paste the output to a file and save it as `.toml` file in any directory. + - Make sure to update the config values to testnet specific ones. You can refer to [Configuring](https://docs.google.com/document/d/1SIbrFWFgju5RAsi6stDyEtgTa78VEt8f3HhqCLoySx4/edit#heading=h.80d1jdtd7ktj) section in the Runbook for specific config settings. 
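    For example, you can capture the generated config directly into a file and then edit the testnet-specific values in place (the file name here is just an example):
    ```bash
    ./soroban-rpc gen-config-file > soroban-rpc-config.toml
    ```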
+- If everything is set up correctly, then you can run the RPC server with the following command: +```bash +./soroban-rpc --config-path +``` \ No newline at end of file diff --git a/cmd/soroban-rpc/docker/Dockerfile b/cmd/soroban-rpc/docker/Dockerfile new file mode 100644 index 00000000..0b0cc231 --- /dev/null +++ b/cmd/soroban-rpc/docker/Dockerfile @@ -0,0 +1,39 @@ +FROM golang:1.21-bullseye as build +ARG RUST_TOOLCHAIN_VERSION=stable +ARG REPOSITORY_VERSION + +WORKDIR /go/src/github.com/stellar/soroban-tools + +ADD . ./ + +RUN git config --global --add safe.directory "/go/src/github.com/stellar/soroban-tools" + +ENV CARGO_HOME=/rust/.cargo +ENV RUSTUP_HOME=/rust/.rust +ENV PATH="/usr/local/go/bin:$CARGO_HOME/bin:${PATH}" +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update +RUN apt-get install -y build-essential +RUN apt-get clean + +RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain $RUST_TOOLCHAIN_VERSION + +RUN make REPOSITORY_VERSION=${REPOSITORY_VERSION} build-soroban-rpc +RUN mv soroban-rpc /bin/soroban-rpc + +FROM ubuntu:22.04 +ARG STELLAR_CORE_VERSION +ENV STELLAR_CORE_VERSION=${STELLAR_CORE_VERSION:-*} +ENV STELLAR_CORE_BINARY_PATH /usr/bin/stellar-core +ENV DEBIAN_FRONTEND=noninteractive + +# ca-certificates are required to make tls connections +RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl wget gnupg apt-utils +RUN wget -qO - https://apt.stellar.org/SDF.asc | APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=true apt-key add - +RUN echo "deb https://apt.stellar.org focal stable" >/etc/apt/sources.list.d/SDF.list +RUN echo "deb https://apt.stellar.org focal unstable" >/etc/apt/sources.list.d/SDF-unstable.list +RUN apt-get update && apt-get install -y stellar-core=${STELLAR_CORE_VERSION} +RUN apt-get clean + +COPY --from=build /bin/soroban-rpc /app/ +ENTRYPOINT ["/app/soroban-rpc"] diff --git a/cmd/soroban-rpc/docker/Dockerfile.release b/cmd/soroban-rpc/docker/Dockerfile.release new file mode 100644 index 00000000..de894a8b --- /dev/null +++ b/cmd/soroban-rpc/docker/Dockerfile.release @@ -0,0 +1,20 @@ +FROM ubuntu:22.04 +ARG STELLAR_CORE_VERSION +ENV STELLAR_CORE_VERSION=${STELLAR_CORE_VERSION:-*} +ARG SOROBAN_RPC_VERSION +ENV SOROBAN_RPC_VERSION=${SOROBAN_RPC_VERSION:-*} + +ENV STELLAR_CORE_BINARY_PATH /usr/bin/stellar-core +ENV DEBIAN_FRONTEND=noninteractive + +# ca-certificates are required to make tls connections +RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates curl wget gnupg apt-utils gpg && \ + curl -sSL https://apt.stellar.org/SDF.asc | gpg --dearmor >/etc/apt/trusted.gpg.d/SDF.gpg && \ + echo "deb https://apt.stellar.org focal stable" >/etc/apt/sources.list.d/SDF.list && \ + echo "deb https://apt.stellar.org focal testing" >/etc/apt/sources.list.d/SDF-testing.list && \ + echo "deb https://apt.stellar.org focal unstable" >/etc/apt/sources.list.d/SDF-unstable.list && \ + apt-get update && \ + apt-get install -y stellar-core=${STELLAR_CORE_VERSION} stellar-soroban-rpc=${SOROBAN_RPC_VERSION} && \ + apt-get clean + +ENTRYPOINT ["/usr/bin/stellar-soroban-rpc"] diff --git a/cmd/soroban-rpc/docker/Makefile b/cmd/soroban-rpc/docker/Makefile new file mode 100644 index 00000000..b95af2b1 --- /dev/null +++ b/cmd/soroban-rpc/docker/Makefile @@ -0,0 +1,34 @@ +SUDO := $(shell docker version >/dev/null 2>&1 || echo "sudo") + +# https://github.com/opencontainers/image-spec/blob/master/annotations.md +BUILD_DATE := $(shell date -u +%FT%TZ) + +# Extract latest release semver from GitHub 
+SOROBAN_RPC_LATEST_RELEASE := $(shell curl -sS https://api.github.com/repos/stellar/soroban-tools/releases/latest|jq -r ".tag_name"| tr -d "v" ) + +# If deb version was provided via the SOROBAN_RPC_VERSION variable use it. +# If not get latest deb build matching release from GitHub +ifndef SOROBAN_RPC_VERSION + SOROBAN_RPC_VERSION_PACKAGE_VERSION := $(shell curl -sS https://apt.stellar.org/dists/focal/unstable/binary-amd64/Packages|grep -A 18 stellar-soroban-rpc|grep Version|grep $(SOROBAN_RPC_LATEST_RELEASE)|head -1|cut -d' ' -f2 ) +else + SOROBAN_RPC_VERSION_PACKAGE_VERSION := $(SOROBAN_RPC_VERSION) +endif + +ifndef SOROBAN_RPC_VERSION_PACKAGE_VERSION + $(error Couldn't establish deb build from version $(SOROBAN_RPC_LATEST_RELEASE). Has the package been built?) +endif + +ifndef STELLAR_CORE_VERSION + $(error STELLAR_CORE_VERSION environment variable must be set. For example 19.10.1-1310.6649f5173.focal~soroban) +endif + +TAG ?= stellar/stellar-soroban-rpc:$(SOROBAN_RPC_VERSION_PACKAGE_VERSION) + +docker-build: + $(SUDO) docker build --pull --platform linux/amd64 $(DOCKER_OPTS) \ + --label org.opencontainers.image.created="$(BUILD_DATE)" \ + --build-arg STELLAR_CORE_VERSION=$(STELLAR_CORE_VERSION) --build-arg SOROBAN_RPC_VERSION=$(SOROBAN_RPC_VERSION_PACKAGE_VERSION) \ + -t $(TAG) -f Dockerfile.release . + +docker-push: + $(SUDO) docker push $(TAG) diff --git a/cmd/soroban-rpc/internal/config/config.go b/cmd/soroban-rpc/internal/config/config.go new file mode 100644 index 00000000..1f89ab2b --- /dev/null +++ b/cmd/soroban-rpc/internal/config/config.go @@ -0,0 +1,164 @@ +package config + +import ( + "os" + "time" + + "github.com/sirupsen/logrus" + "github.com/spf13/pflag" +) + +// Config represents the configuration of a soroban-rpc server +type Config struct { + ConfigPath string + + Strict bool + + StellarCoreURL string + CaptiveCoreStoragePath string + StellarCoreBinaryPath string + CaptiveCoreConfigPath string + CaptiveCoreHTTPPort uint + + Endpoint string + AdminEndpoint string + CheckpointFrequency uint32 + CoreRequestTimeout time.Duration + DefaultEventsLimit uint + EventLedgerRetentionWindow uint32 + FriendbotURL string + HistoryArchiveURLs []string + IngestionTimeout time.Duration + LogFormat LogFormat + LogLevel logrus.Level + MaxEventsLimit uint + MaxHealthyLedgerLatency time.Duration + NetworkPassphrase string + PreflightWorkerCount uint + PreflightWorkerQueueSize uint + PreflightEnableDebug bool + SQLiteDBPath string + TransactionLedgerRetentionWindow uint32 + RequestBacklogGlobalQueueLimit uint + RequestBacklogGetHealthQueueLimit uint + RequestBacklogGetEventsQueueLimit uint + RequestBacklogGetNetworkQueueLimit uint + RequestBacklogGetLatestLedgerQueueLimit uint + RequestBacklogGetLedgerEntriesQueueLimit uint + RequestBacklogGetTransactionQueueLimit uint + RequestBacklogSendTransactionQueueLimit uint + RequestBacklogSimulateTransactionQueueLimit uint + RequestExecutionWarningThreshold time.Duration + MaxRequestExecutionDuration time.Duration + MaxGetHealthExecutionDuration time.Duration + MaxGetEventsExecutionDuration time.Duration + MaxGetNetworkExecutionDuration time.Duration + MaxGetLatestLedgerExecutionDuration time.Duration + MaxGetLedgerEntriesExecutionDuration time.Duration + MaxGetTransactionExecutionDuration time.Duration + MaxSendTransactionExecutionDuration time.Duration + MaxSimulateTransactionExecutionDuration time.Duration + + // We memoize these, so they bind to pflags correctly + optionsCache *ConfigOptions + flagset *pflag.FlagSet +} + +func (cfg *Config) 
SetValues(lookupEnv func(string) (string, bool)) error { + // We start with the defaults + if err := cfg.loadDefaults(); err != nil { + return err + } + + // Then we load from the environment variables and cli flags, to try to find + // the config file path + if err := cfg.loadEnv(lookupEnv); err != nil { + return err + } + if err := cfg.loadFlags(); err != nil { + return err + } + + // If we specified a config file, we load that + if cfg.ConfigPath != "" { + // Merge in the config file flags + if err := cfg.loadConfigPath(); err != nil { + return err + } + + // Load from cli flags and environment variables again, to overwrite what we + // got from the config file + if err := cfg.loadEnv(lookupEnv); err != nil { + return err + } + if err := cfg.loadFlags(); err != nil { + return err + } + } + + return nil +} + +// loadDefaults populates the config with default values +func (cfg *Config) loadDefaults() error { + for _, option := range cfg.options() { + if option.DefaultValue != nil { + if err := option.setValue(option.DefaultValue); err != nil { + return err + } + } + } + return nil +} + +// loadEnv populates the config with values from the environment variables +func (cfg *Config) loadEnv(lookupEnv func(string) (string, bool)) error { + for _, option := range cfg.options() { + key, ok := option.getEnvKey() + if !ok { + continue + } + value, ok := lookupEnv(key) + if !ok { + continue + } + if err := option.setValue(value); err != nil { + return err + } + } + return nil +} + +// loadFlags populates the config with values from the cli flags +func (cfg *Config) loadFlags() error { + for _, option := range cfg.options() { + if option.flag == nil || !option.flag.Changed { + continue + } + val, err := option.GetFlag(cfg.flagset) + if err != nil { + return err + } + if err := option.setValue(val); err != nil { + return err + } + } + return nil +} + +// loadConfigPath loads a new config from a toml file at the given path. Strict +// mode will return an error if there are any unknown toml variables set. Note, +// strict-mode can also be set by putting `STRICT=true` in the config.toml file +// itself. +func (cfg *Config) loadConfigPath() error { + file, err := os.Open(cfg.ConfigPath) + if err != nil { + return err + } + defer file.Close() + return parseToml(file, cfg.Strict, cfg) +} + +func (cfg *Config) Validate() error { + return cfg.options().Validate() +} diff --git a/cmd/soroban-rpc/internal/config/config_option.go b/cmd/soroban-rpc/internal/config/config_option.go new file mode 100644 index 00000000..86eab8e7 --- /dev/null +++ b/cmd/soroban-rpc/internal/config/config_option.go @@ -0,0 +1,144 @@ +package config + +import ( + "fmt" + "reflect" + "strconv" + "time" + + "github.com/spf13/pflag" + "github.com/stellar/go/support/errors" + "github.com/stellar/go/support/strutils" +) + +// ConfigOptions is a group of ConfigOptions that can be for convenience +// initialized and set at the same time. +type ConfigOptions []*ConfigOption + +// Validate all the config options. 
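Before moving on to option validation, the loading order that `SetValues` implements above is worth a worked example: defaults are applied first, then the config file, then environment variables, and finally CLI flags, with later sources winning. The sketch below is illustrative only and is assumed to live inside the `config` package (for instance as an example test); the flag and environment values are invented.

```go
package config

import (
	"fmt"

	"github.com/spf13/cobra"
)

// Illustrative sketch of SetValues precedence; not part of the patch.
func ExampleConfig_SetValues() {
	var cfg Config

	cmd := &cobra.Command{}
	if err := cfg.AddFlags(cmd); err != nil {
		panic(err)
	}
	// Highest precedence: CLI flags that were explicitly set.
	_ = cmd.ParseFlags([]string{"--network-passphrase", "from-flag"})

	// Environment variables beat defaults (and the config file), but lose to flags.
	lookupEnv := func(key string) (string, bool) {
		if key == "DB_PATH" {
			return "/tmp/from-env.sqlite", true
		}
		return "", false
	}

	if err := cfg.SetValues(lookupEnv); err != nil {
		panic(err)
	}

	fmt.Println(cfg.NetworkPassphrase) // from-flag
	fmt.Println(cfg.SQLiteDBPath)      // /tmp/from-env.sqlite
	fmt.Println(cfg.Endpoint)          // localhost:8000 (default; nothing overrode it)
}
```

Note that `loadFlags` only copies flags whose `Changed` bit is set, so a flag passed explicitly overrides earlier sources even when its value equals the flag default, while untouched flags leave prior values alone.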
+func (options ConfigOptions) Validate() error { + var missingOptions []errMissingRequiredOption + for _, option := range options { + if option.Validate != nil { + err := option.Validate(option) + if err == nil { + continue + } + if missingOption, ok := err.(errMissingRequiredOption); ok { + missingOptions = append(missingOptions, missingOption) + continue + } + return errors.Wrap(err, fmt.Sprintf("Invalid config value for %s", option.Name)) + } + } + if len(missingOptions) > 0 { + // we had one or more missing options, combine these all into a single error. + errString := "The following required configuration parameters are missing:" + for _, missingOpt := range missingOptions { + errString += "\n*\t" + missingOpt.strErr + errString += "\n \t" + missingOpt.usage + } + return &errMissingRequiredOption{strErr: errString} + } + return nil +} + +// ConfigOption is a complete description of the configuration of a command line option +type ConfigOption struct { + Name string // e.g. "database-url" + EnvVar string // e.g. "DATABASE_URL". Defaults to uppercase/underscore representation of name + TomlKey string // e.g. "DATABASE_URL". Defaults to uppercase/underscore representation of name. - to omit from toml + Usage string // Help text + DefaultValue interface{} // A default if no option is provided. Omit or set to `nil` if no default + ConfigKey interface{} // Pointer to the final key in the linked Config struct + CustomSetValue func(*ConfigOption, interface{}) error // Optional function for custom validation/transformation + Validate func(*ConfigOption) error // Function called after loading all options, to validate the configuration + MarshalTOML func(*ConfigOption) (interface{}, error) + + flag *pflag.Flag // The persistent flag that the config option is attached to +} + +// Returns false if this option is omitted in the toml +func (o ConfigOption) getTomlKey() (string, bool) { + if o.TomlKey == "-" || o.TomlKey == "_" { + return "", false + } + if o.TomlKey != "" { + return o.TomlKey, true + } + if envVar, ok := o.getEnvKey(); ok { + return envVar, true + } + return strutils.KebabToConstantCase(o.Name), true +} + +// Returns false if this option is omitted in the env +func (o ConfigOption) getEnvKey() (string, bool) { + if o.EnvVar == "-" || o.EnvVar == "_" { + return "", false + } + if o.EnvVar != "" { + return o.EnvVar, true + } + return strutils.KebabToConstantCase(o.Name), true +} + +// TODO: See if we can remove CustomSetValue into just SetValue/ParseValue +func (o *ConfigOption) setValue(i interface{}) (err error) { + if o.CustomSetValue != nil { + return o.CustomSetValue(o, i) + } + // it's unfortunate that Set below panics when it cannot set the value.. + // we'll want to catch this so that we can alert the user nicely. 
+ defer func() { + if recoverRes := recover(); recoverRes != nil { + var ok bool + if err, ok = recoverRes.(error); ok { + return + } + + err = errors.Errorf("config option setting error ('%s') %v", o.Name, recoverRes) + } + }() + parser := func(option *ConfigOption, i interface{}) error { + panic(fmt.Sprintf("no parser for flag %s", o.Name)) + } + switch o.ConfigKey.(type) { + case *bool: + parser = parseBool + case *int, *int8, *int16, *int32, *int64: + parser = parseInt + case *uint, *uint8, *uint16, *uint32: + parser = parseUint32 + case *uint64: + parser = parseUint + case *float32, *float64: + parser = parseFloat + case *string: + parser = parseString + case *[]string: + parser = parseStringSlice + case *time.Duration: + parser = parseDuration + } + + return parser(o, i) +} + +func (o *ConfigOption) marshalTOML() (interface{}, error) { + if o.MarshalTOML != nil { + return o.MarshalTOML(o) + } + // go-toml doesn't handle ints other than `int`, so we have to do that ourselves. + switch v := o.ConfigKey.(type) { + case *int, *int8, *int16, *int32, *int64: + return []byte(strconv.FormatInt(reflect.ValueOf(v).Elem().Int(), 10)), nil + case *uint, *uint8, *uint16, *uint32, *uint64: + return []byte(strconv.FormatUint(reflect.ValueOf(v).Elem().Uint(), 10)), nil + case *time.Duration: + return v.String(), nil + default: + // Unknown, hopefully go-toml knows what to do with it! :crossed_fingers: + return reflect.ValueOf(o.ConfigKey).Elem().Interface(), nil + } +} diff --git a/cmd/soroban-rpc/internal/config/config_option_test.go b/cmd/soroban-rpc/internal/config/config_option_test.go new file mode 100644 index 00000000..831c8865 --- /dev/null +++ b/cmd/soroban-rpc/internal/config/config_option_test.go @@ -0,0 +1,260 @@ +package config + +import ( + "fmt" + "math" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestConfigOptionGetTomlKey(t *testing.T) { + // Explicitly set toml key + key, ok := ConfigOption{TomlKey: "TOML_KEY"}.getTomlKey() + assert.Equal(t, "TOML_KEY", key) + assert.True(t, ok) + + // Explicitly disabled toml key via `-` + key, ok = ConfigOption{TomlKey: "-"}.getTomlKey() + assert.Equal(t, "", key) + assert.False(t, ok) + + // Explicitly disabled toml key via `_` + key, ok = ConfigOption{TomlKey: "_"}.getTomlKey() + assert.Equal(t, "", key) + assert.False(t, ok) + + // Fallback to env var + key, ok = ConfigOption{EnvVar: "ENV_VAR"}.getTomlKey() + assert.Equal(t, "ENV_VAR", key) + assert.True(t, ok) + + // Env-var disabled, autogenerate from name + key, ok = ConfigOption{Name: "test-flag", EnvVar: "-"}.getTomlKey() + assert.Equal(t, "TEST_FLAG", key) + assert.True(t, ok) + + // Env-var not set, autogenerate from name + key, ok = ConfigOption{Name: "test-flag"}.getTomlKey() + assert.Equal(t, "TEST_FLAG", key) + assert.True(t, ok) +} + +func TestValidateRequired(t *testing.T) { + var strVal string + o := &ConfigOption{ + Name: "required-option", + ConfigKey: &strVal, + Validate: required, + } + + // unset + assert.ErrorContains(t, o.Validate(o), "required-option is required") + + // set with blank value + require.NoError(t, o.setValue("")) + assert.ErrorContains(t, o.Validate(o), "required-option is required") + + // set with valid value + require.NoError(t, o.setValue("not-blank")) + assert.NoError(t, o.Validate(o)) +} + +func TestValidatePositiveUint32(t *testing.T) { + var val uint32 + o := &ConfigOption{ + Name: "positive-option", + ConfigKey: &val, + Validate: positive, + } + + // unset + assert.ErrorContains(t, 
o.Validate(o), "positive-option must be positive") + + // set with 0 value + require.NoError(t, o.setValue(uint32(0))) + assert.ErrorContains(t, o.Validate(o), "positive-option must be positive") + + // set with valid value + require.NoError(t, o.setValue(uint32(1))) + assert.NoError(t, o.Validate(o)) +} + +func TestValidatePositiveInt(t *testing.T) { + var val int + o := &ConfigOption{ + Name: "positive-option", + ConfigKey: &val, + Validate: positive, + } + + // unset + assert.ErrorContains(t, o.Validate(o), "positive-option must be positive") + + // set with 0 value + require.NoError(t, o.setValue(0)) + assert.ErrorContains(t, o.Validate(o), "positive-option must be positive") + + // set with negative value + require.NoError(t, o.setValue(-1)) + assert.ErrorContains(t, o.Validate(o), "positive-option must be positive") + + // set with valid value + require.NoError(t, o.setValue(1)) + assert.NoError(t, o.Validate(o)) +} + +func TestUnassignableField(t *testing.T) { + var co ConfigOption + var b bool + co.Name = "mykey" + co.ConfigKey = &b + err := co.setValue("abc") + require.Error(t, err) + require.Contains(t, err.Error(), co.Name) +} + +func TestSetValue(t *testing.T) { + var b bool + var i int + var u32 uint32 + var u64 uint64 + var f64 float64 + var s string + + for _, scenario := range []struct { + name string + key interface{} + value interface{} + err error + }{ + { + name: "valid-bool", + key: &b, + value: true, + err: nil, + }, + { + name: "valid-bool-string", + key: &b, + value: "true", + err: nil, + }, + { + name: "valid-bool-string-false", + key: &b, + value: "false", + err: nil, + }, + { + name: "valid-bool-string-uppercase", + key: &b, + value: "TRUE", + err: nil, + }, + { + name: "invalid-bool-string", + key: &b, + value: "foobar", + err: fmt.Errorf("invalid boolean value invalid-bool-string: foobar"), + }, + { + name: "invalid-bool-string", + key: &b, + value: "foobar", + err: fmt.Errorf("invalid boolean value invalid-bool-string: foobar"), + }, + { + name: "valid-int", + key: &i, + value: 1, + err: nil, + }, + { + name: "valid-int-string", + key: &i, + value: "1", + err: nil, + }, + { + name: "invalid-int-string", + key: &i, + value: "abcd", + err: fmt.Errorf("strconv.ParseInt: parsing \"abcd\": invalid syntax"), + }, + { + name: "valid-uint32", + key: &u32, + value: 1, + err: nil, + }, + { + name: "overflow-uint32", + key: &u32, + value: uint64(math.MaxUint32) + 1, + err: fmt.Errorf("overflow-uint32 overflows uint32"), + }, + { + name: "negative-uint32", + key: &u32, + value: -1, + err: fmt.Errorf("negative-uint32 cannot be negative"), + }, + { + name: "valid-uint", + key: &u64, + value: 1, + err: nil, + }, + { + name: "negative-uint", + key: &u64, + value: -1, + err: fmt.Errorf("negative-uint cannot be negative"), + }, + { + name: "valid-float", + key: &f64, + value: 1.05, + err: nil, + }, + { + name: "valid-float-int", + key: &f64, + value: int64(1234), + err: nil, + }, + { + name: "valid-float-string", + key: &f64, + value: "1.05", + err: nil, + }, + { + name: "invalid-float-string", + key: &f64, + value: "foobar", + err: fmt.Errorf("strconv.ParseFloat: parsing \"foobar\": invalid syntax"), + }, + { + name: "valid-string", + key: &s, + value: "foobar", + err: nil, + }, + } { + t.Run(scenario.name, func(t *testing.T) { + co := ConfigOption{ + Name: scenario.name, + ConfigKey: scenario.key, + } + err := co.setValue(scenario.value) + if scenario.err != nil { + require.EqualError(t, err, scenario.err.Error()) + } else { + require.NoError(t, err) + } + }) + } +} diff 
--git a/cmd/soroban-rpc/internal/config/config_test.go b/cmd/soroban-rpc/internal/config/config_test.go new file mode 100644 index 00000000..67769a3c --- /dev/null +++ b/cmd/soroban-rpc/internal/config/config_test.go @@ -0,0 +1,79 @@ +package config + +import ( + "runtime" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestLoadConfigPathPrecedence(t *testing.T) { + var cfg Config + + cmd := &cobra.Command{} + require.NoError(t, cfg.AddFlags(cmd)) + require.NoError(t, cmd.ParseFlags([]string{ + "--config-path", "./test.soroban.rpc.config", + "--stellar-core-binary-path", "/usr/overridden/stellar-core", + "--network-passphrase", "CLI test passphrase", + })) + + require.NoError(t, cfg.SetValues(func(key string) (string, bool) { + switch key { + case "STELLAR_CORE_BINARY_PATH": + return "/env/stellar-core", true + case "DB_PATH": + return "/env/overridden/db", true + default: + return "", false + } + })) + require.NoError(t, cfg.Validate()) + + assert.Equal(t, "/opt/stellar/soroban-rpc/etc/stellar-captive-core.cfg", cfg.CaptiveCoreConfigPath, "should read values from the config path file") + assert.Equal(t, "CLI test passphrase", cfg.NetworkPassphrase, "cli flags should override --config-path values") + assert.Equal(t, "/usr/overridden/stellar-core", cfg.StellarCoreBinaryPath, "cli flags should override --config-path values and env vars") + assert.Equal(t, "/env/overridden/db", cfg.SQLiteDBPath, "env var should override config file") + assert.Equal(t, 2*time.Second, cfg.CoreRequestTimeout, "default value should be used, if not set anywhere else") +} + +func TestConfigLoadDefaults(t *testing.T) { + // Set up a default config + cfg := Config{} + require.NoError(t, cfg.loadDefaults()) + + // Check that the defaults are set + assert.Equal(t, defaultHTTPEndpoint, cfg.Endpoint) + assert.Equal(t, uint(runtime.NumCPU()), cfg.PreflightWorkerCount) +} + +func TestConfigLoadFlagsDefaultValuesOverrideExisting(t *testing.T) { + // Set up a config with an existing non-default value + cfg := Config{ + NetworkPassphrase: "existing value", + LogLevel: logrus.InfoLevel, + Endpoint: "localhost:8000", + } + + cmd := &cobra.Command{} + require.NoError(t, cfg.AddFlags(cmd)) + // Set up a flag set with the default value + require.NoError(t, cmd.ParseFlags([]string{ + "--network-passphrase", "", + "--log-level", logrus.PanicLevel.String(), + })) + + // Load the flags + require.NoError(t, cfg.loadFlags()) + + // Check that the flag value is set + assert.Equal(t, "", cfg.NetworkPassphrase) + assert.Equal(t, logrus.PanicLevel, cfg.LogLevel) + + // Check it didn't overwrite values which were not set in the flags + assert.Equal(t, "localhost:8000", cfg.Endpoint) +} diff --git a/cmd/soroban-rpc/internal/config/flags.go b/cmd/soroban-rpc/internal/config/flags.go new file mode 100644 index 00000000..d313aa31 --- /dev/null +++ b/cmd/soroban-rpc/internal/config/flags.go @@ -0,0 +1,172 @@ +package config + +import ( + "fmt" + "net" + "time" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" +) + +// Init adds the CLI flags to the command. This lets the command output the +// flags as part of the --help output. +func (cfg *Config) AddFlags(cmd *cobra.Command) error { + cfg.flagset = cmd.PersistentFlags() + for _, option := range cfg.options() { + if err := option.AddFlag(cfg.flagset); err != nil { + return err + } + } + return nil +} + +// AddFlag adds a CLI flag for this option to the given flagset. 
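The per-option registration in `AddFlag` below has one behaviour worth illustrating: an option with a `CustomSetValue` parser is registered as a plain string flag, while an option without one gets a pflag of its `ConfigKey`'s native type. A minimal sketch, not part of the patch; the option names and defaults here are made up.

```go
package config

import (
	"fmt"
	"time"

	"github.com/spf13/pflag"
)

// Illustrative sketch of how AddFlag picks the flag type.
func ExampleConfigOption_AddFlag() {
	var window time.Duration
	typed := &ConfigOption{
		Name:         "example-window",
		Usage:        "made-up duration option",
		ConfigKey:    &window,
		DefaultValue: 5 * time.Second,
	}

	var level string
	custom := &ConfigOption{
		Name:      "example-level",
		Usage:     "made-up option with a custom parser",
		ConfigKey: &level,
		CustomSetValue: func(co *ConfigOption, i interface{}) error {
			level = fmt.Sprint(i)
			return nil
		},
	}

	fs := pflag.NewFlagSet("example", pflag.ContinueOnError)
	_ = typed.AddFlag(fs)
	_ = custom.AddFlag(fs)

	fmt.Println(fs.Lookup("example-window").Value.Type()) // duration
	fmt.Println(fs.Lookup("example-level").Value.Type())  // string
}
```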
+func (co *ConfigOption) AddFlag(flagset *pflag.FlagSet) error { + // config options that has no names do not represent a valid flag. + if len(co.Name) == 0 { + return nil + } + // Treat any option with a custom parser as a string option. + if co.CustomSetValue != nil { + if co.DefaultValue == nil { + co.DefaultValue = "" + } + flagset.String(co.Name, fmt.Sprint(co.DefaultValue), co.UsageText()) + co.flag = flagset.Lookup(co.Name) + return nil + } + + // Infer the type of the flag based on the type of the ConfigKey. This list + // of options is based on the available flag types from pflags + switch co.ConfigKey.(type) { + case *bool: + flagset.Bool(co.Name, co.DefaultValue.(bool), co.UsageText()) + case *time.Duration: + flagset.Duration(co.Name, co.DefaultValue.(time.Duration), co.UsageText()) + case *float32: + flagset.Float32(co.Name, co.DefaultValue.(float32), co.UsageText()) + case *float64: + flagset.Float64(co.Name, co.DefaultValue.(float64), co.UsageText()) + case *net.IP: + flagset.IP(co.Name, co.DefaultValue.(net.IP), co.UsageText()) + case *net.IPNet: + flagset.IPNet(co.Name, co.DefaultValue.(net.IPNet), co.UsageText()) + case *int: + flagset.Int(co.Name, co.DefaultValue.(int), co.UsageText()) + case *int8: + flagset.Int8(co.Name, co.DefaultValue.(int8), co.UsageText()) + case *int16: + flagset.Int16(co.Name, co.DefaultValue.(int16), co.UsageText()) + case *int32: + flagset.Int32(co.Name, co.DefaultValue.(int32), co.UsageText()) + case *int64: + flagset.Int64(co.Name, co.DefaultValue.(int64), co.UsageText()) + case *[]int: + flagset.IntSlice(co.Name, co.DefaultValue.([]int), co.UsageText()) + case *[]int32: + flagset.Int32Slice(co.Name, co.DefaultValue.([]int32), co.UsageText()) + case *[]int64: + flagset.Int64Slice(co.Name, co.DefaultValue.([]int64), co.UsageText()) + case *string: + // Set an empty string if no default was provided, since some value is always required for pflags + if co.DefaultValue == nil { + co.DefaultValue = "" + } + flagset.String(co.Name, co.DefaultValue.(string), co.UsageText()) + case *[]string: + // Set an empty string if no default was provided, since some value is always required for pflags + if co.DefaultValue == nil { + co.DefaultValue = []string{} + } + flagset.StringSlice(co.Name, co.DefaultValue.([]string), co.UsageText()) + case *uint: + flagset.Uint(co.Name, co.DefaultValue.(uint), co.UsageText()) + case *uint8: + flagset.Uint8(co.Name, co.DefaultValue.(uint8), co.UsageText()) + case *uint16: + flagset.Uint16(co.Name, co.DefaultValue.(uint16), co.UsageText()) + case *uint32: + flagset.Uint32(co.Name, co.DefaultValue.(uint32), co.UsageText()) + case *uint64: + flagset.Uint64(co.Name, co.DefaultValue.(uint64), co.UsageText()) + case *[]uint: + flagset.UintSlice(co.Name, co.DefaultValue.([]uint), co.UsageText()) + default: + return fmt.Errorf("unexpected option type: %T", co.ConfigKey) + } + + co.flag = flagset.Lookup(co.Name) + return nil +} + +func (co *ConfigOption) GetFlag(flagset *pflag.FlagSet) (interface{}, error) { + // Treat any option with a custom parser as a string option. + if co.CustomSetValue != nil { + return flagset.GetString(co.Name) + } + + // Infer the type of the flag based on the type of the ConfigKey. This list + // of options is based on the available flag types from pflags, and must + // match the above in `AddFlag`. 
+ switch co.ConfigKey.(type) { + case *bool: + return flagset.GetBool(co.Name) + case *time.Duration: + return flagset.GetDuration(co.Name) + case *float32: + return flagset.GetFloat32(co.Name) + case *float64: + return flagset.GetFloat64(co.Name) + case *net.IP: + return flagset.GetIP(co.Name) + case *net.IPNet: + return flagset.GetIPNet(co.Name) + case *int: + return flagset.GetInt(co.Name) + case *int8: + return flagset.GetInt8(co.Name) + case *int16: + return flagset.GetInt16(co.Name) + case *int32: + return flagset.GetInt32(co.Name) + case *int64: + return flagset.GetInt64(co.Name) + case *[]int: + return flagset.GetIntSlice(co.Name) + case *[]int32: + return flagset.GetInt32Slice(co.Name) + case *[]int64: + return flagset.GetInt64Slice(co.Name) + case *string: + return flagset.GetString(co.Name) + case *[]string: + return flagset.GetStringSlice(co.Name) + case *uint: + return flagset.GetUint(co.Name) + case *uint8: + return flagset.GetUint8(co.Name) + case *uint16: + return flagset.GetUint16(co.Name) + case *uint32: + return flagset.GetUint32(co.Name) + case *uint64: + return flagset.GetUint64(co.Name) + case *[]uint: + return flagset.GetUintSlice(co.Name) + default: + return nil, fmt.Errorf("unexpected option type: %T", co.ConfigKey) + } +} + +// UsageText returns the string to use for the usage text of the option. The +// string returned will be the Usage defined on the ConfigOption, along with +// the environment variable. +func (co *ConfigOption) UsageText() string { + envVar, hasEnvVar := co.getEnvKey() + if hasEnvVar { + return fmt.Sprintf("%s (%s)", co.Usage, envVar) + } else { + return co.Usage + } +} diff --git a/cmd/soroban-rpc/internal/config/log_format.go b/cmd/soroban-rpc/internal/config/log_format.go new file mode 100644 index 00000000..076e43e6 --- /dev/null +++ b/cmd/soroban-rpc/internal/config/log_format.go @@ -0,0 +1,59 @@ +package config + +import "fmt" + +type LogFormat int + +const ( + LogFormatText LogFormat = iota + LogFormatJSON +) + +func (f LogFormat) MarshalText() ([]byte, error) { + switch f { + case LogFormatText: + return []byte("text"), nil + case LogFormatJSON: + return []byte("json"), nil + default: + return nil, fmt.Errorf("unknown log format: %d", f) + } +} + +func (f *LogFormat) UnmarshalText(text []byte) error { + switch string(text) { + case "text": + *f = LogFormatText + case "json": + *f = LogFormatJSON + default: + return fmt.Errorf("unknown log format: %s", text) + } + return nil +} + +func (f LogFormat) MarshalTOML() ([]byte, error) { + return f.MarshalText() +} + +func (f *LogFormat) UnmarshalTOML(i interface{}) error { + switch v := i.(type) { + case []byte: + return f.UnmarshalText(v) + case string: + return f.UnmarshalText([]byte(v)) + default: + return fmt.Errorf("unknown log format: %v", v) + } +} + +func (f LogFormat) String() string { + switch f { + case LogFormatText: + return "text" + case LogFormatJSON: + return "json" + default: + panic(fmt.Sprintf("unknown log format: %d", f)) + } +} diff --git a/cmd/soroban-rpc/internal/config/options.go b/cmd/soroban-rpc/internal/config/options.go new file mode 100644 index 00000000..cecfb2e7 --- /dev/null +++ b/cmd/soroban-rpc/internal/config/options.go @@ -0,0 +1,464 @@ +package config + +import ( + "fmt" + "os" + "os/exec" + "reflect" + "runtime" + "time" + + "github.com/sirupsen/logrus" + + "github.com/stellar/go/network" + "github.com/stellar/go/support/errors" + "github.com/stellar/go/support/strutils" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/ledgerbucketwindow" +) 
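The `LogFormat` type above round-trips between its text, TOML, and string representations and rejects anything other than "text" or "json". A small illustrative sketch (not part of the patch):

```go
package config

import "fmt"

// Illustrative sketch of the LogFormat round-trip.
func ExampleLogFormat() {
	var f LogFormat
	if err := f.UnmarshalText([]byte("json")); err != nil {
		panic(err)
	}
	out, _ := f.MarshalText()
	fmt.Println(string(out), f.String()) // json json

	// Unknown values are rejected with a descriptive error.
	fmt.Println(f.UnmarshalTOML("yaml")) // unknown log format: yaml
}
```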
+ +const defaultHTTPEndpoint = "localhost:8000" + +func (cfg *Config) options() ConfigOptions { + if cfg.optionsCache != nil { + return *cfg.optionsCache + } + defaultStellarCoreBinaryPath, _ := exec.LookPath("stellar-core") + cfg.optionsCache = &ConfigOptions{ + { + Name: "config-path", + EnvVar: "SOROBAN_RPC_CONFIG_PATH", + TomlKey: "-", + Usage: "File path to the toml configuration file", + ConfigKey: &cfg.ConfigPath, + }, + { + Name: "config-strict", + EnvVar: "SOROBAN_RPC_CONFIG_STRICT", + TomlKey: "STRICT", + Usage: "Enable strict toml configuration file parsing. This will prevent unknown fields in the config toml from being parsed.", + ConfigKey: &cfg.Strict, + DefaultValue: false, + }, + { + Name: "endpoint", + Usage: "Endpoint to listen and serve on", + ConfigKey: &cfg.Endpoint, + DefaultValue: defaultHTTPEndpoint, + }, + { + Name: "admin-endpoint", + Usage: "Admin endpoint to listen and serve on. WARNING: this should not be accessible from the Internet and does not use TLS. \"\" (default) disables the admin server", + ConfigKey: &cfg.AdminEndpoint, + }, + { + Name: "stellar-core-url", + Usage: "URL used to query Stellar Core (local captive core by default)", + ConfigKey: &cfg.StellarCoreURL, + Validate: func(co *ConfigOption) error { + // This is a bit awkward. We're actually setting a default, but we + // can't do that until the config is fully parsed, so we do it as a + // validator here. + if cfg.StellarCoreURL == "" { + cfg.StellarCoreURL = fmt.Sprintf("http://localhost:%d", cfg.CaptiveCoreHTTPPort) + } + return nil + }, + }, + { + Name: "stellar-core-timeout", + Usage: "Timeout used when submitting requests to stellar-core", + ConfigKey: &cfg.CoreRequestTimeout, + DefaultValue: 2 * time.Second, + }, + { + Name: "stellar-captive-core-http-port", + Usage: "HTTP port for Captive Core to listen on (0 disables the HTTP server)", + ConfigKey: &cfg.CaptiveCoreHTTPPort, + DefaultValue: uint(11626), + }, + { + Name: "log-level", + Usage: "minimum log severity (debug, info, warn, error) to log", + ConfigKey: &cfg.LogLevel, + DefaultValue: logrus.InfoLevel, + CustomSetValue: func(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case string: + ll, err := logrus.ParseLevel(v) + if err != nil { + return fmt.Errorf("could not parse %s: %q", option.Name, v) + } + cfg.LogLevel = ll + case logrus.Level: + cfg.LogLevel = v + case *logrus.Level: + cfg.LogLevel = *v + default: + return fmt.Errorf("could not parse %s: %q", option.Name, v) + } + return nil + }, + MarshalTOML: func(option *ConfigOption) (interface{}, error) { + return cfg.LogLevel.String(), nil + }, + }, + { + Name: "log-format", + Usage: "format used for output logs (json or text)", + ConfigKey: &cfg.LogFormat, + DefaultValue: LogFormatText, + CustomSetValue: func(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case string: + return errors.Wrapf( + cfg.LogFormat.UnmarshalText([]byte(v)), + "could not parse %s", + option.Name, + ) + case LogFormat: + cfg.LogFormat = v + case *LogFormat: + cfg.LogFormat = *v + default: + return fmt.Errorf("could not parse %s: %q", option.Name, v) + } + return nil + }, + MarshalTOML: func(option *ConfigOption) (interface{}, error) { + return cfg.LogFormat.String(), nil + }, + }, + { + Name: "stellar-core-binary-path", + Usage: "path to stellar core binary", + ConfigKey: &cfg.StellarCoreBinaryPath, + DefaultValue: defaultStellarCoreBinaryPath, + Validate: required, + }, + { + Name: 
"captive-core-config-path", + Usage: "path to additional configuration for the Stellar Core configuration file used by captive core. It must, at least, include enough details to define a quorum set", + ConfigKey: &cfg.CaptiveCoreConfigPath, + Validate: required, + }, + { + Name: "captive-core-storage-path", + Usage: "Storage location for Captive Core bucket data", + ConfigKey: &cfg.CaptiveCoreStoragePath, + CustomSetValue: func(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case string: + if v == "" || v == "." { + cwd, err := os.Getwd() + if err != nil { + return fmt.Errorf("unable to determine the current directory: %s", err) + } + v = cwd + } + cfg.CaptiveCoreStoragePath = v + return nil + case nil: + cwd, err := os.Getwd() + if err != nil { + return fmt.Errorf("unable to determine the current directory: %s", err) + } + cfg.CaptiveCoreStoragePath = cwd + return nil + default: + return fmt.Errorf("could not parse %s: %v", option.Name, v) + } + }, + }, + { + Name: "history-archive-urls", + Usage: "comma-separated list of stellar history archives to connect with", + ConfigKey: &cfg.HistoryArchiveURLs, + Validate: required, + }, + { + Name: "friendbot-url", + Usage: "The friendbot URL to be returned by getNetwork endpoint", + ConfigKey: &cfg.FriendbotURL, + }, + { + Name: "network-passphrase", + Usage: "Network passphrase of the Stellar network transactions should be signed for. Commonly used values are \"" + network.FutureNetworkPassphrase + "\", \"" + network.TestNetworkPassphrase + "\" and \"" + network.PublicNetworkPassphrase + "\"", + ConfigKey: &cfg.NetworkPassphrase, + Validate: required, + }, + { + Name: "db-path", + Usage: "SQLite DB path", + ConfigKey: &cfg.SQLiteDBPath, + DefaultValue: "soroban_rpc.sqlite", + }, + { + Name: "ingestion-timeout", + Usage: "Ingestion Timeout when bootstrapping data (checkpoint and in-memory initialization) and preparing ledger reads", + ConfigKey: &cfg.IngestionTimeout, + DefaultValue: 30 * time.Minute, + }, + { + Name: "checkpoint-frequency", + Usage: "establishes how many ledgers exist between checkpoints, do NOT change this unless you really know what you are doing", + ConfigKey: &cfg.CheckpointFrequency, + DefaultValue: uint32(64), + }, + { + Name: "event-retention-window", + Usage: fmt.Sprintf("configures the event retention window expressed in number of ledgers,"+ + " the default value is %d which corresponds to about 24 hours of history", ledgerbucketwindow.DefaultEventLedgerRetentionWindow), + ConfigKey: &cfg.EventLedgerRetentionWindow, + DefaultValue: uint32(ledgerbucketwindow.DefaultEventLedgerRetentionWindow), + Validate: positive, + }, + { + Name: "transaction-retention-window", + Usage: "configures the transaction retention window expressed in number of ledgers," + + " the default value is 1440 which corresponds to about 2 hours of history", + ConfigKey: &cfg.TransactionLedgerRetentionWindow, + DefaultValue: uint32(1440), + Validate: positive, + }, + { + Name: "max-events-limit", + Usage: "Maximum amount of events allowed in a single getEvents response", + ConfigKey: &cfg.MaxEventsLimit, + DefaultValue: uint(10000), + }, + { + Name: "default-events-limit", + Usage: "Default cap on the amount of events included in a single getEvents response", + ConfigKey: &cfg.DefaultEventsLimit, + DefaultValue: uint(100), + Validate: func(co *ConfigOption) error { + if cfg.DefaultEventsLimit > cfg.MaxEventsLimit { + return fmt.Errorf( + "default-events-limit (%v) cannot exceed max-events-limit (%v)", + cfg.DefaultEventsLimit, + 
cfg.MaxEventsLimit, + ) + } + return nil + }, + }, + { + Name: "max-healthy-ledger-latency", + Usage: "maximum ledger latency (i.e. time elapsed since the last known ledger closing time) considered to be healthy" + + " (used for the /health endpoint)", + ConfigKey: &cfg.MaxHealthyLedgerLatency, + DefaultValue: 30 * time.Second, + }, + { + Name: "preflight-worker-count", + Usage: "Number of workers (read goroutines) used to compute preflights for the simulateTransaction endpoint. Defaults to the number of CPUs.", + ConfigKey: &cfg.PreflightWorkerCount, + DefaultValue: uint(runtime.NumCPU()), + Validate: positive, + }, + { + Name: "preflight-worker-queue-size", + Usage: "Maximum number of outstanding preflight requests for the simulateTransaction endpoint. Defaults to the number of CPUs.", + ConfigKey: &cfg.PreflightWorkerQueueSize, + DefaultValue: uint(runtime.NumCPU()), + Validate: positive, + }, + { + Name: "preflight-enable-debug", + Usage: "Enable debug information in preflighting (provides more detailed errors). It should not be enabled in production deployments.", + ConfigKey: &cfg.PreflightEnableDebug, + DefaultValue: true, + }, + { + TomlKey: strutils.KebabToConstantCase("request-backlog-global-queue-limit"), + Usage: "Maximum number of outstanding requests", + ConfigKey: &cfg.RequestBacklogGlobalQueueLimit, + DefaultValue: uint(5000), + Validate: positive, + }, + { + TomlKey: strutils.KebabToConstantCase("request-backlog-get-health-queue-limit"), + Usage: "Maximum number of outstanding GetHealth requests", + ConfigKey: &cfg.RequestBacklogGetHealthQueueLimit, + DefaultValue: uint(1000), + Validate: positive, + }, + { + TomlKey: strutils.KebabToConstantCase("request-backlog-get-events-queue-limit"), + Usage: "Maximum number of outstanding GetEvents requests", + ConfigKey: &cfg.RequestBacklogGetEventsQueueLimit, + DefaultValue: uint(1000), + Validate: positive, + }, + { + TomlKey: strutils.KebabToConstantCase("request-backlog-get-network-queue-limit"), + Usage: "Maximum number of outstanding GetNetwork requests", + ConfigKey: &cfg.RequestBacklogGetNetworkQueueLimit, + DefaultValue: uint(1000), + Validate: positive, + }, + { + TomlKey: strutils.KebabToConstantCase("request-backlog-get-latest-ledger-queue-limit"), + Usage: "Maximum number of outstanding GetLatestsLedger requests", + ConfigKey: &cfg.RequestBacklogGetLatestLedgerQueueLimit, + DefaultValue: uint(1000), + Validate: positive, + }, + { + TomlKey: strutils.KebabToConstantCase("request-backlog-get-ledger-entries-queue-limit"), + Usage: "Maximum number of outstanding GetLedgerEntries requests", + ConfigKey: &cfg.RequestBacklogGetLedgerEntriesQueueLimit, + DefaultValue: uint(1000), + Validate: positive, + }, + { + TomlKey: strutils.KebabToConstantCase("request-backlog-get-transaction-queue-limit"), + Usage: "Maximum number of outstanding GetTransaction requests", + ConfigKey: &cfg.RequestBacklogGetTransactionQueueLimit, + DefaultValue: uint(1000), + Validate: positive, + }, + { + TomlKey: strutils.KebabToConstantCase("request-backlog-send-transaction-queue-limit"), + Usage: "Maximum number of outstanding SendTransaction requests", + ConfigKey: &cfg.RequestBacklogSendTransactionQueueLimit, + DefaultValue: uint(500), + Validate: positive, + }, + { + TomlKey: strutils.KebabToConstantCase("request-backlog-simulate-transaction-queue-limit"), + Usage: "Maximum number of outstanding SimulateTransaction requests", + ConfigKey: &cfg.RequestBacklogSimulateTransactionQueueLimit, + DefaultValue: uint(100), + Validate: positive, + }, + { + 
TomlKey: strutils.KebabToConstantCase("request-execution-warning-threshold"), + Usage: "The request execution warning threshold is the predetermined maximum duration of time that a request can take to be processed before a warning would be generated", + ConfigKey: &cfg.RequestExecutionWarningThreshold, + DefaultValue: 5 * time.Second, + }, + { + TomlKey: strutils.KebabToConstantCase("max-request-execution-duration"), + Usage: "The max request execution duration is the predefined maximum duration of time allowed for processing a request. When that time elapses, the server would return 504 and abort the request's execution", + ConfigKey: &cfg.MaxRequestExecutionDuration, + DefaultValue: 25 * time.Second, + }, + { + TomlKey: strutils.KebabToConstantCase("max-get-health-execution-duration"), + Usage: "The maximum duration of time allowed for processing a getHealth request. When that time elapses, the rpc server would return -32001 and abort the request's execution", + ConfigKey: &cfg.MaxGetHealthExecutionDuration, + DefaultValue: 5 * time.Second, + }, + { + TomlKey: strutils.KebabToConstantCase("max-get_events-execution-duration"), + Usage: "The maximum duration of time allowed for processing a getEvents request. When that time elapses, the rpc server would return -32001 and abort the request's execution", + ConfigKey: &cfg.MaxGetEventsExecutionDuration, + DefaultValue: 10 * time.Second, + }, + { + TomlKey: strutils.KebabToConstantCase("max-get-network-execution-duration"), + Usage: "The maximum duration of time allowed for processing a getNetwork request. When that time elapses, the rpc server would return -32001 and abort the request's execution", + ConfigKey: &cfg.MaxGetNetworkExecutionDuration, + DefaultValue: 5 * time.Second, + }, + { + TomlKey: strutils.KebabToConstantCase("max-get-latest-ledger-execution-duration"), + Usage: "The maximum duration of time allowed for processing a getLatestLedger request. When that time elapses, the rpc server would return -32001 and abort the request's execution", + ConfigKey: &cfg.MaxGetLatestLedgerExecutionDuration, + DefaultValue: 5 * time.Second, + }, + { + TomlKey: strutils.KebabToConstantCase("max-get_ledger-entries-execution-duration"), + Usage: "The maximum duration of time allowed for processing a getLedgerEntries request. When that time elapses, the rpc server would return -32001 and abort the request's execution", + ConfigKey: &cfg.MaxGetLedgerEntriesExecutionDuration, + DefaultValue: 5 * time.Second, + }, + { + TomlKey: strutils.KebabToConstantCase("max-get-transaction-execution-duration"), + Usage: "The maximum duration of time allowed for processing a getTransaction request. When that time elapses, the rpc server would return -32001 and abort the request's execution", + ConfigKey: &cfg.MaxGetTransactionExecutionDuration, + DefaultValue: 5 * time.Second, + }, + { + TomlKey: strutils.KebabToConstantCase("max-send-transaction-execution-duration"), + Usage: "The maximum duration of time allowed for processing a sendTransaction request. When that time elapses, the rpc server would return -32001 and abort the request's execution", + ConfigKey: &cfg.MaxSendTransactionExecutionDuration, + DefaultValue: 15 * time.Second, + }, + { + TomlKey: strutils.KebabToConstantCase("max-simulate-transaction-execution-duration"), + Usage: "The maximum duration of time allowed for processing a simulateTransaction request. 
When that time elapses, the rpc server would return -32001 and abort the request's execution", + ConfigKey: &cfg.MaxSimulateTransactionExecutionDuration, + DefaultValue: 15 * time.Second, + }, + } + return *cfg.optionsCache +} + +type errMissingRequiredOption struct { + strErr string + usage string +} + +func (e errMissingRequiredOption) Error() string { + return e.strErr +} + +func required(option *ConfigOption) error { + switch reflect.ValueOf(option.ConfigKey).Elem().Kind() { + case reflect.Slice: + if reflect.ValueOf(option.ConfigKey).Elem().Len() > 0 { + return nil + } + default: + if !reflect.ValueOf(option.ConfigKey).Elem().IsZero() { + return nil + } + } + + waysToSet := []string{} + if option.Name != "" && option.Name != "-" { + waysToSet = append(waysToSet, fmt.Sprintf("specify --%s on the command line", option.Name)) + } + if option.EnvVar != "" && option.EnvVar != "-" { + waysToSet = append(waysToSet, fmt.Sprintf("set the %s environment variable", option.EnvVar)) + } + + if tomlKey, hasTomlKey := option.getTomlKey(); hasTomlKey { + waysToSet = append(waysToSet, fmt.Sprintf("set %s in the config file", tomlKey)) + } + + advice := "" + switch len(waysToSet) { + case 1: + advice = fmt.Sprintf(" Please %s.", waysToSet[0]) + case 2: + advice = fmt.Sprintf(" Please %s or %s.", waysToSet[0], waysToSet[1]) + case 3: + advice = fmt.Sprintf(" Please %s, %s, or %s.", waysToSet[0], waysToSet[1], waysToSet[2]) + } + + return errMissingRequiredOption{strErr: fmt.Sprintf("%s is required.%s", option.Name, advice), usage: option.Usage} +} + +func positive(option *ConfigOption) error { + switch v := option.ConfigKey.(type) { + case *int, *int8, *int16, *int32, *int64: + if reflect.ValueOf(v).Elem().Int() <= 0 { + return fmt.Errorf("%s must be positive", option.Name) + } + case *uint, *uint8, *uint16, *uint32, *uint64: + if reflect.ValueOf(v).Elem().Uint() <= 0 { + return fmt.Errorf("%s must be positive", option.Name) + } + default: + return fmt.Errorf("%s is not a positive integer", option.Name) + } + return nil +} diff --git a/cmd/soroban-rpc/internal/config/options_test.go b/cmd/soroban-rpc/internal/config/options_test.go new file mode 100644 index 00000000..958306ab --- /dev/null +++ b/cmd/soroban-rpc/internal/config/options_test.go @@ -0,0 +1,109 @@ +package config + +import ( + "reflect" + "regexp" + "testing" + "unsafe" + + "github.com/stretchr/testify/assert" +) + +func TestAllConfigKeysMustBePointers(t *testing.T) { + // This test is to ensure we've set up all the config keys correctly. + cfg := Config{} + for _, option := range cfg.options() { + kind := reflect.ValueOf(option.ConfigKey).Type().Kind() + if kind != reflect.Pointer { + t.Errorf("ConfigOption.ConfigKey must be a pointer, got %s for %s", kind, option.Name) + } + } +} + +func TestAllConfigFieldsMustHaveASingleOption(t *testing.T) { + // This test ensures we've documented all the config options, and not missed + // any when adding new flags (or accidentally added conflicting duplicates). + + // Allow us to explicitly exclude any fields on the Config struct, which are not going to have Options. + // e.g. 
"ConfigPath" + excluded := map[string]bool{} + + cfg := Config{} + cfgValue := reflect.ValueOf(cfg) + cfgType := cfgValue.Type() + + options := cfg.options() + optionsByField := map[uintptr]*ConfigOption{} + for _, option := range options { + key := uintptr(reflect.ValueOf(option.ConfigKey).UnsafePointer()) + if existing, ok := optionsByField[key]; ok { + t.Errorf("Conflicting ConfigOptions %s and %s, point to the same struct field", existing.Name, option.Name) + } + optionsByField[key] = option + } + + // Get the base address of the struct + cfgPtr := uintptr(unsafe.Pointer(&cfg)) + for _, structField := range reflect.VisibleFields(cfgType) { + if excluded[structField.Name] { + continue + } + if !structField.IsExported() { + continue + } + + // Each field has an offset within that struct + fieldPointer := cfgPtr + structField.Offset + + // There should be an option which points to this field + _, ok := optionsByField[fieldPointer] + if !ok { + t.Errorf("Missing ConfigOption for field Config.%s", structField.Name) + } + } +} + +// Use this regex to validate all our config toml keys. +// This is based on the simple bare key regex at: https://toml.io/en/v1.0.0#keys +// Toml, actually allows much more complex keys, via quoted keys, but we want +// to keep things simple. +// +// The one exception we make is `.` in keys, which allows us to have nested +// objects. +var keyRegex = regexp.MustCompile(`^[.A-Za-z0-9_-]+$`) + +func TestAllOptionsMustHaveAUniqueValidTomlKey(t *testing.T) { + // This test ensures we've set a toml key for all the config options, and the + // keys are all unique & valid. Note, we don't need to check that all struct + // fields on the config have an option, because the test above checks that. + + // Allow us to explicitly exclude any fields on the Config struct, which are + // not going to be in the toml. This should be the "Name" field of the + // ConfigOption we wish to exclude. + excluded := map[string]bool{ + "config-path": true, + } + + cfg := Config{} + options := cfg.options() + optionsByTomlKey := map[string]interface{}{} + for _, option := range options { + key, ok := option.getTomlKey() + if excluded[option.Name] { + if ok { + t.Errorf("Found unexpected toml key for excluded ConfigOption %s. 
Does the test need updating?", option.Name) + } + continue + } + if !ok { + t.Errorf("Missing toml key for ConfigOption %s", option.Name) + } + if existing, ok := optionsByTomlKey[key]; ok { + t.Errorf("Conflicting ConfigOptions %s and %s, have the same toml key: %s", existing, option.Name, key) + } + optionsByTomlKey[key] = option.Name + + // Ensure the keys are simple valid toml keys + assert.True(t, keyRegex.MatchString(key), "Invalid toml key for ConfigOption %s: %s", option.Name, key) + } +} diff --git a/cmd/soroban-rpc/internal/config/parse.go b/cmd/soroban-rpc/internal/config/parse.go new file mode 100644 index 00000000..00d93a17 --- /dev/null +++ b/cmd/soroban-rpc/internal/config/parse.go @@ -0,0 +1,180 @@ +package config + +import ( + "fmt" + "math" + "reflect" + "strconv" + "strings" + "time" + + "github.com/stellar/go/support/errors" +) + +func parseBool(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case bool: + *option.ConfigKey.(*bool) = v + case string: + lower := strings.ToLower(v) + if lower == "true" { + *option.ConfigKey.(*bool) = true + } else if lower == "false" { + *option.ConfigKey.(*bool) = false + } else { + return fmt.Errorf("invalid boolean value %s: %s", option.Name, v) + } + default: + return fmt.Errorf("could not parse boolean %s: %v", option.Name, i) + } + return nil +} + +func parseInt(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case string: + parsed, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return err + } + reflect.ValueOf(option.ConfigKey).Elem().SetInt(parsed) + case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64: + return parseInt(option, fmt.Sprint(v)) + default: + return fmt.Errorf("could not parse int %s: %v", option.Name, i) + } + return nil +} + +func parseUint(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case string: + parsed, err := strconv.ParseUint(v, 10, 64) + if err != nil { + return err + } + reflect.ValueOf(option.ConfigKey).Elem().SetUint(parsed) + case int, int8, int16, int32, int64: + if reflect.ValueOf(v).Int() < 0 { + return fmt.Errorf("%s cannot be negative", option.Name) + } + return parseUint(option, fmt.Sprint(v)) + case uint, uint8, uint16, uint32, uint64: + return parseUint(option, fmt.Sprint(v)) + default: + return fmt.Errorf("could not parse uint %s: %v", option.Name, i) + } + return nil +} + +func parseFloat(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case string: + parsed, err := strconv.ParseFloat(v, 64) + if err != nil { + return err + } + reflect.ValueOf(option.ConfigKey).Elem().SetFloat(parsed) + case uint, uint8, uint16, uint32, uint64, int, int8, int16, int32, int64, float32, float64: + return parseFloat(option, fmt.Sprint(v)) + default: + return fmt.Errorf("could not parse float %s: %v", option.Name, i) + } + return nil +} + +func parseString(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case string: + *option.ConfigKey.(*string) = v + default: + return fmt.Errorf("could not parse string %s: %v", option.Name, i) + } + return nil +} + +func parseUint32(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case string: + parsed, err := strconv.ParseUint(v, 10, 64) + if err != nil { + return err + } + if parsed > math.MaxUint32 { + return fmt.Errorf("%s overflows uint32", option.Name) + } + 
reflect.ValueOf(option.ConfigKey).Elem().SetUint(parsed) + case int, int8, int16, int32, int64: + if reflect.ValueOf(v).Int() < 0 { + return fmt.Errorf("%s cannot be negative", option.Name) + } + return parseUint32(option, fmt.Sprint(v)) + case uint, uint8, uint16, uint32, uint64: + return parseUint32(option, fmt.Sprint(v)) + default: + return fmt.Errorf("could not parse uint32 %s: %v", option.Name, i) + } + return nil +} + +func parseDuration(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case string: + d, err := time.ParseDuration(v) + if err != nil { + return errors.Wrapf(err, "could not parse duration: %q", v) + } + *option.ConfigKey.(*time.Duration) = d + case time.Duration: + *option.ConfigKey.(*time.Duration) = v + case *time.Duration: + *option.ConfigKey.(*time.Duration) = *v + default: + return fmt.Errorf("%s is not a duration", option.Name) + } + return nil +} + +func parseStringSlice(option *ConfigOption, i interface{}) error { + switch v := i.(type) { + case nil: + return nil + case string: + if v == "" { + *option.ConfigKey.(*[]string) = nil + } else { + *option.ConfigKey.(*[]string) = strings.Split(v, ",") + } + return nil + case []string: + *option.ConfigKey.(*[]string) = v + return nil + case []interface{}: + *option.ConfigKey.(*[]string) = make([]string, len(v)) + for i, s := range v { + switch s := s.(type) { + case string: + (*option.ConfigKey.(*[]string))[i] = s + default: + return fmt.Errorf("could not parse %s: %v", option.Name, v) + } + } + return nil + default: + return fmt.Errorf("could not parse %s: %v", option.Name, v) + } +} diff --git a/cmd/soroban-rpc/internal/config/test.soroban.rpc.config b/cmd/soroban-rpc/internal/config/test.soroban.rpc.config new file mode 100644 index 00000000..c28a9c17 --- /dev/null +++ b/cmd/soroban-rpc/internal/config/test.soroban.rpc.config @@ -0,0 +1,11 @@ +ENDPOINT="localhost:8003" +FRIENDBOT_URL="http://localhost:8000/friendbot" +NETWORK_PASSPHRASE="Standalone Network ; February 2017" +STELLAR_CORE_URL="http://localhost:11626" +CAPTIVE_CORE_CONFIG_PATH="/opt/stellar/soroban-rpc/etc/stellar-captive-core.cfg" +CAPTIVE_CORE_STORAGE_PATH="/opt/stellar/soroban-rpc/captive-core" +STELLAR_CORE_BINARY_PATH="/usr/bin/stellar-core" +HISTORY_ARCHIVE_URLS=["http://localhost:1570"] +DB_PATH="/opt/stellar/soroban-rpc/rpc_db.sqlite" +STELLAR_CAPTIVE_CORE_HTTP_PORT=0 +CHECKPOINT_FREQUENCY=64 diff --git a/cmd/soroban-rpc/internal/config/toml.go b/cmd/soroban-rpc/internal/config/toml.go new file mode 100644 index 00000000..e6ea5a91 --- /dev/null +++ b/cmd/soroban-rpc/internal/config/toml.go @@ -0,0 +1,109 @@ +package config + +import ( + "fmt" + "io" + "reflect" + "strings" + + "github.com/pelletier/go-toml" +) + +func parseToml(r io.Reader, strict bool, cfg *Config) error { + tree, err := toml.LoadReader(r) + if err != nil { + return err + } + + validKeys := map[string]struct{}{} + for _, option := range cfg.options() { + key, ok := option.getTomlKey() + if !ok { + continue + } + validKeys[key] = struct{}{} + value := tree.Get(key) + if value == nil { + // not found + continue + } + if err := option.setValue(value); err != nil { + return err + } + } + + if cfg.Strict || strict { + for _, key := range tree.Keys() { + if _, ok := validKeys[key]; !ok { + return fmt.Errorf("invalid config: unexpected entry specified in toml file %q", key) + } + } + } + + return nil +} + +func (cfg *Config) MarshalTOML() ([]byte, error) { + tree, err := toml.TreeFromMap(map[string]interface{}{}) + if err != nil { + 
return nil, err + } + + for _, option := range cfg.options() { + key, ok := option.getTomlKey() + if !ok { + continue + } + + // Downcast a couple primitive types which are not directly supported by the toml encoder + // For non-primitives, you should implement toml.Marshaler instead. + value, err := option.marshalTOML() + if err != nil { + return nil, err + } + + if m, ok := value.(toml.Marshaler); ok { + value, err = m.MarshalTOML() + if err != nil { + return nil, err + } + } + + tree.SetWithOptions( + key, + toml.SetOptions{ + Comment: strings.ReplaceAll( + wordWrap(option.Usage, 80-2), + "\n", + "\n ", + ), + // output unset values commented out + // TODO: Provide commented example values for these + Commented: reflect.ValueOf(option.ConfigKey).Elem().IsZero(), + }, + value, + ) + } + + return tree.Marshal() +} + +// From https://gist.github.com/kennwhite/306317d81ab4a885a965e25aa835b8ef +func wordWrap(text string, lineWidth int) string { + words := strings.Fields(strings.TrimSpace(text)) + if len(words) == 0 { + return text + } + wrapped := words[0] + spaceLeft := lineWidth - len(wrapped) + for _, word := range words[1:] { + if len(word)+1 > spaceLeft { + wrapped += "\n" + word + spaceLeft = lineWidth - len(word) + } else { + wrapped += " " + word + spaceLeft -= 1 + len(word) + } + } + return wrapped +} diff --git a/cmd/soroban-rpc/internal/config/toml_test.go b/cmd/soroban-rpc/internal/config/toml_test.go new file mode 100644 index 00000000..93fa1809 --- /dev/null +++ b/cmd/soroban-rpc/internal/config/toml_test.go @@ -0,0 +1,138 @@ +package config + +import ( + "bytes" + "reflect" + "strings" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/stellar/go/network" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +const basicToml = ` +HISTORY_ARCHIVE_URLS = [ "http://history-futurenet.stellar.org" ] +NETWORK_PASSPHRASE = "Test SDF Future Network ; October 2022" + +# testing comments work ok +STELLAR_CORE_BINARY_PATH = "/usr/bin/stellar-core" +CAPTIVE_CORE_STORAGE_PATH = "/etc/stellar/soroban-rpc" +CAPTIVE_CORE_CONFIG_PATH = "/etc/stellar/soroban-rpc/captive-core.cfg" +` + +func TestBasicTomlReading(t *testing.T) { + cfg := Config{} + require.NoError(t, parseToml(strings.NewReader(basicToml), false, &cfg)) + + // Check the fields got read correctly + assert.Equal(t, []string{"http://history-futurenet.stellar.org"}, cfg.HistoryArchiveURLs) + assert.Equal(t, network.FutureNetworkPassphrase, cfg.NetworkPassphrase) + assert.Equal(t, "/etc/stellar/soroban-rpc", cfg.CaptiveCoreStoragePath) + assert.Equal(t, "/etc/stellar/soroban-rpc/captive-core.cfg", cfg.CaptiveCoreConfigPath) +} + +func TestBasicTomlReadingStrictMode(t *testing.T) { + invalidToml := `UNKNOWN = "key"` + cfg := Config{} + + // Should ignore unknown fields when strict is not set + require.NoError(t, parseToml(strings.NewReader(invalidToml), false, &cfg)) + + // Should return an error when an unknown key is present and strict is set in the cli + // flags + require.EqualError( + t, + parseToml(strings.NewReader(invalidToml), true, &cfg), + "invalid config: unexpected entry specified in toml file \"UNKNOWN\"", + ) + + // Should return an error when an unknown key is present and strict is set in the + // config file + invalidStrictToml := ` + STRICT = true + UNKNOWN = "key" +` + require.EqualError( + t, + parseToml(strings.NewReader(invalidStrictToml), false, &cfg), + "invalid config: unexpected entry specified in toml file \"UNKNOWN\"", + ) + + // It succeeds with a valid config + require.NoError(t,
parseToml(strings.NewReader(basicToml), true, &cfg)) +} + +func TestBasicTomlWriting(t *testing.T) { + // Set up a default config + cfg := Config{} + require.NoError(t, cfg.loadDefaults()) + + // Output it to toml + outBytes, err := cfg.MarshalTOML() + require.NoError(t, err) + + out := string(outBytes) + + // Spot-check that the output looks right. Try to check one value for each + // type of option. (string, duration, uint, etc...) + assert.Contains(t, out, "ENDPOINT = \"localhost:8000\"") + assert.Contains(t, out, "STELLAR_CORE_TIMEOUT = \"2s\"") + assert.Contains(t, out, "STELLAR_CAPTIVE_CORE_HTTP_PORT = 11626") + assert.Contains(t, out, "LOG_LEVEL = \"info\"") + assert.Contains(t, out, "LOG_FORMAT = \"text\"") + + // Check that the output contains comments about each option + assert.Contains(t, out, "# Network passphrase of the Stellar network transactions should be signed for") + + // Test that it wraps long lines. + // Note the newline at char 80. This also checks it adds a space after the + // comment when outputting multi-line comments, which go-toml does *not* do + // by default. + assert.Contains(t, out, "# configures the event retention window expressed in number of ledgers, the\n# default value is 17280 which corresponds to about 24 hours of history") +} + +func TestRoundTrip(t *testing.T) { + // Set up a default config + cfg := Config{} + require.NoError(t, cfg.loadDefaults()) + + // Generate test values for every option, so we can round-trip test them all. + for _, option := range cfg.options() { + optType := reflect.ValueOf(option.ConfigKey).Elem().Type() + switch option.ConfigKey.(type) { + case *bool: + *option.ConfigKey.(*bool) = true + case *string: + *option.ConfigKey.(*string) = "test" + case *uint: + *option.ConfigKey.(*uint) = 42 + case *uint32: + *option.ConfigKey.(*uint32) = 32 + case *time.Duration: + *option.ConfigKey.(*time.Duration) = 5 * time.Second + case *[]string: + *option.ConfigKey.(*[]string) = []string{"a", "b"} + case *logrus.Level: + *option.ConfigKey.(*logrus.Level) = logrus.InfoLevel + case *LogFormat: + *option.ConfigKey.(*LogFormat) = LogFormatText + default: + t.Fatalf("TestRoundTrip not implemented for type %s, on option %s, please add a test value", optType.Kind(), option.Name) + } + } + + // Output it to toml + outBytes, err := cfg.MarshalTOML() + require.NoError(t, err) + + // t.Log(string(outBytes)) + + // Parse it back + require.NoError( + t, + parseToml(bytes.NewReader(outBytes), false, &cfg), + ) +} diff --git a/cmd/soroban-rpc/internal/config/version.go b/cmd/soroban-rpc/internal/config/version.go new file mode 100644 index 00000000..909fdaea --- /dev/null +++ b/cmd/soroban-rpc/internal/config/version.go @@ -0,0 +1,15 @@ +package config + +var ( + // Version is the soroban-rpc version number, which is injected during build time. + Version = "0.0.0" + + // CommitHash is the soroban-rpc git commit hash, which is injected during build time. + CommitHash = "" + + // BuildTimestamp is the timestamp at which the soroban-rpc was built, injected during build time. + BuildTimestamp = "" + + // Branch is the git branch from which the soroban-rpc was built, injected during build time. 
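+ // These values are typically injected at build time via Go linker flags, e.g. (illustrative): go build -ldflags "-X github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config.Branch=main"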
+ Branch = "" +) diff --git a/cmd/soroban-rpc/internal/daemon/daemon.go b/cmd/soroban-rpc/internal/daemon/daemon.go new file mode 100644 index 00000000..63afb9a7 --- /dev/null +++ b/cmd/soroban-rpc/internal/daemon/daemon.go @@ -0,0 +1,314 @@ +package daemon + +import ( + "context" + "errors" + "net/http" + "net/http/pprof" //nolint:gosec + "os" + "os/signal" + runtimePprof "runtime/pprof" + "sync" + "syscall" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" + "github.com/stellar/go/clients/stellarcore" + "github.com/stellar/go/historyarchive" + "github.com/stellar/go/ingest/ledgerbackend" + supporthttp "github.com/stellar/go/support/http" + supportlog "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/events" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/ingest" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/ledgerbucketwindow" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/preflight" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/transactions" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/util" +) + +const ( + prometheusNamespace = "soroban_rpc" + maxLedgerEntryWriteBatchSize = 150 + defaultReadTimeout = 5 * time.Second + defaultShutdownGracePeriod = 10 * time.Second +) + +type Daemon struct { + core *ledgerbackend.CaptiveStellarCore + coreClient *CoreClientWithMetrics + ingestService *ingest.Service + db *db.DB + jsonRPCHandler *internal.Handler + logger *supportlog.Entry + preflightWorkerPool *preflight.PreflightWorkerPool + server *http.Server + adminServer *http.Server + closeOnce sync.Once + closeError error + done chan struct{} + metricsRegistry *prometheus.Registry +} + +func (d *Daemon) GetDB() *db.DB { + return d.db +} + +func (d *Daemon) close() { + shutdownCtx, shutdownRelease := context.WithTimeout(context.Background(), defaultShutdownGracePeriod) + defer shutdownRelease() + var closeErrors []error + + if err := d.server.Shutdown(shutdownCtx); err != nil { + d.logger.WithError(err).Error("error during Soroban JSON RPC server Shutdown") + closeErrors = append(closeErrors, err) + } + if d.adminServer != nil { + if err := d.adminServer.Shutdown(shutdownCtx); err != nil { + d.logger.WithError(err).Error("error during Soroban JSON admin server Shutdown") + closeErrors = append(closeErrors, err) + } + } + + if err := d.ingestService.Close(); err != nil { + d.logger.WithError(err).Error("error closing ingestion service") + closeErrors = append(closeErrors, err) + } + if err := d.core.Close(); err != nil { + d.logger.WithError(err).Error("error closing captive core") + closeErrors = append(closeErrors, err) + } + d.jsonRPCHandler.Close() + if err := d.db.Close(); err != nil { + d.logger.WithError(err).Error("Error closing db") + closeErrors = append(closeErrors, err) + } + d.preflightWorkerPool.Close() + d.closeError = errors.Join(closeErrors...) + close(d.done) +} + +func (d *Daemon) Close() error { + d.closeOnce.Do(d.close) + return d.closeError +} + +// newCaptiveCore creates a new captive core backend instance and returns it. 
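+// Captive core runs the stellar-core binary as a subprocess (configured from the toml file at cfg.CaptiveCoreConfigPath) and serves as the ledger backend for ingestion.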
+func newCaptiveCore(cfg *config.Config, logger *supportlog.Entry) (*ledgerbackend.CaptiveStellarCore, error) { + captiveCoreTomlParams := ledgerbackend.CaptiveCoreTomlParams{ + HTTPPort: &cfg.CaptiveCoreHTTPPort, + HistoryArchiveURLs: cfg.HistoryArchiveURLs, + NetworkPassphrase: cfg.NetworkPassphrase, + Strict: true, + UseDB: true, + EnforceSorobanDiagnosticEvents: true, + } + captiveCoreToml, err := ledgerbackend.NewCaptiveCoreTomlFromFile(cfg.CaptiveCoreConfigPath, captiveCoreTomlParams) + if err != nil { + logger.WithError(err).Fatal("Invalid captive core toml") + } + + captiveConfig := ledgerbackend.CaptiveCoreConfig{ + BinaryPath: cfg.StellarCoreBinaryPath, + StoragePath: cfg.CaptiveCoreStoragePath, + NetworkPassphrase: cfg.NetworkPassphrase, + HistoryArchiveURLs: cfg.HistoryArchiveURLs, + CheckpointFrequency: cfg.CheckpointFrequency, + Log: logger.WithField("subservice", "stellar-core"), + Toml: captiveCoreToml, + UserAgent: "captivecore", + UseDB: true, + } + return ledgerbackend.NewCaptive(captiveConfig) + +} + +func MustNew(cfg *config.Config) *Daemon { + logger := supportlog.New() + logger.SetLevel(cfg.LogLevel) + + if cfg.LogFormat == config.LogFormatJSON { + logger.UseJSONFormatter() + } + + core, err := newCaptiveCore(cfg, logger) + if err != nil { + logger.WithError(err).Fatal("could not create captive core") + } + + if len(cfg.HistoryArchiveURLs) == 0 { + logger.Fatal("no history archive URLs were provided") + } + historyArchive, err := historyarchive.Connect( + cfg.HistoryArchiveURLs[0], + historyarchive.ConnectOptions{ + CheckpointFrequency: cfg.CheckpointFrequency, + }, + ) + if err != nil { + logger.WithError(err).Fatal("could not connect to history archive") + } + + metricsRegistry := prometheus.NewRegistry() + dbConn, err := db.OpenSQLiteDBWithPrometheusMetrics(cfg.SQLiteDBPath, prometheusNamespace, "db", metricsRegistry) + if err != nil { + logger.WithError(err).Fatal("could not open database") + } + + daemon := &Daemon{ + logger: logger, + core: core, + db: dbConn, + done: make(chan struct{}), + metricsRegistry: metricsRegistry, + coreClient: newCoreClientWithMetrics(stellarcore.Client{ + URL: cfg.StellarCoreURL, + HTTP: &http.Client{Timeout: cfg.CoreRequestTimeout}, + }, metricsRegistry), + } + + eventStore := events.NewMemoryStore( + daemon, + cfg.NetworkPassphrase, + cfg.EventLedgerRetentionWindow, + ) + transactionStore := transactions.NewMemoryStore( + daemon, + cfg.NetworkPassphrase, + cfg.TransactionLedgerRetentionWindow, + ) + + // initialize the stores using what was on the DB + readTxMetaCtx, cancelReadTxMeta := context.WithTimeout(context.Background(), cfg.IngestionTimeout) + defer cancelReadTxMeta() + // NOTE: We could optimize this to avoid unnecessary ingestion calls + // (the range of txmetads can be larger than the store retention windows) + // but it's probably not worth the pain. + err = db.NewLedgerReader(dbConn).StreamAllLedgers(readTxMetaCtx, func(txmeta xdr.LedgerCloseMeta) error { + if err := eventStore.IngestEvents(txmeta); err != nil { + logger.WithError(err).Fatal("could not initialize event memory store") + } + if err := transactionStore.IngestTransactions(txmeta); err != nil { + logger.WithError(err).Fatal("could not initialize transaction memory store") + } + return nil + }) + if err != nil { + logger.WithError(err).Fatal("could not obtain txmeta cache from the database") + } + + onIngestionRetry := func(err error, dur time.Duration) { + logger.WithError(err).Error("could not run ingestion.
Retrying") + } + maxRetentionWindow := cfg.EventLedgerRetentionWindow + if cfg.TransactionLedgerRetentionWindow > maxRetentionWindow { + maxRetentionWindow = cfg.TransactionLedgerRetentionWindow + } else if cfg.EventLedgerRetentionWindow == 0 && cfg.TransactionLedgerRetentionWindow > ledgerbucketwindow.DefaultEventLedgerRetentionWindow { + maxRetentionWindow = ledgerbucketwindow.DefaultEventLedgerRetentionWindow + } + ingestService := ingest.NewService(ingest.Config{ + Logger: logger, + DB: db.NewReadWriter(dbConn, maxLedgerEntryWriteBatchSize, maxRetentionWindow), + EventStore: eventStore, + TransactionStore: transactionStore, + NetworkPassPhrase: cfg.NetworkPassphrase, + Archive: historyArchive, + LedgerBackend: core, + Timeout: cfg.IngestionTimeout, + OnIngestionRetry: onIngestionRetry, + Daemon: daemon, + }) + + ledgerEntryReader := db.NewLedgerEntryReader(dbConn) + preflightWorkerPool := preflight.NewPreflightWorkerPool( + daemon, + cfg.PreflightWorkerCount, + cfg.PreflightWorkerQueueSize, + cfg.PreflightEnableDebug, + ledgerEntryReader, + cfg.NetworkPassphrase, + logger, + ) + + jsonRPCHandler := internal.NewJSONRPCHandler(cfg, internal.HandlerParams{ + Daemon: daemon, + EventStore: eventStore, + TransactionStore: transactionStore, + Logger: logger, + LedgerReader: db.NewLedgerReader(dbConn), + LedgerEntryReader: db.NewLedgerEntryReader(dbConn), + PreflightGetter: preflightWorkerPool, + }) + + httpHandler := supporthttp.NewAPIMux(logger) + httpHandler.Handle("/", jsonRPCHandler) + + daemon.preflightWorkerPool = preflightWorkerPool + daemon.ingestService = ingestService + daemon.jsonRPCHandler = &jsonRPCHandler + + daemon.server = &http.Server{ + Addr: cfg.Endpoint, + Handler: httpHandler, + ReadTimeout: defaultReadTimeout, + } + if cfg.AdminEndpoint != "" { + adminMux := supporthttp.NewMux(logger) + adminMux.HandleFunc("/debug/pprof/", pprof.Index) + adminMux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline) + adminMux.HandleFunc("/debug/pprof/profile", pprof.Profile) + adminMux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) + adminMux.HandleFunc("/debug/pprof/trace", pprof.Trace) + // add the entry points for: + // goroutine, threadcreate, heap, allocs, block, mutex + for _, profile := range runtimePprof.Profiles() { + adminMux.Handle("/debug/pprof/"+profile.Name(), pprof.Handler(profile.Name())) + } + adminMux.Handle("/metrics", promhttp.HandlerFor(metricsRegistry, promhttp.HandlerOpts{})) + daemon.adminServer = &http.Server{Addr: cfg.AdminEndpoint, Handler: adminMux} + } + daemon.registerMetrics() + return daemon +} + +func (d *Daemon) Run() { + d.logger.WithFields(supportlog.F{ + "version": config.Version, + "commit": config.CommitHash, + "addr": d.server.Addr, + }).Info("starting Soroban JSON RPC server") + + panicGroup := util.UnrecoverablePanicGroup.Log(d.logger) + panicGroup.Go(func() { + if err := d.server.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { + // Error starting or closing listener: + d.logger.WithError(err).Fatal("soroban JSON RPC server encountered fatal error") + } + }) + + if d.adminServer != nil { + panicGroup.Go(func() { + if err := d.adminServer.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { + d.logger.WithError(err).Error("soroban admin server encountered fatal error") + } + }) + } + + // Shutdown gracefully when we receive an interrupt signal. + // First server.Shutdown closes all open listeners, then closes all idle connections. + // Finally, it waits a grace period (10s here) for connections to return to idle and then shut down. 
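+ // Block here until either a termination signal arrives or Close() is called elsewhere (which closes d.done).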
+ signals := make(chan os.Signal, 1) + signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM) + + select { + case <-signals: + d.Close() + case <-d.done: + return + } +} diff --git a/cmd/soroban-rpc/internal/daemon/interfaces/interfaces.go b/cmd/soroban-rpc/internal/daemon/interfaces/interfaces.go new file mode 100644 index 00000000..529ecdef --- /dev/null +++ b/cmd/soroban-rpc/internal/daemon/interfaces/interfaces.go @@ -0,0 +1,22 @@ +package interfaces + +import ( + "context" + + "github.com/prometheus/client_golang/prometheus" + proto "github.com/stellar/go/protocols/stellarcore" +) + +// Daemon defines the interface that the Daemon would be implementing. +// this would be useful for decoupling purposes, allowing to test components without +// the actual daemon. +type Daemon interface { + MetricsRegistry() *prometheus.Registry + MetricsNamespace() string + CoreClient() CoreClient +} + +type CoreClient interface { + Info(ctx context.Context) (*proto.InfoResponse, error) + SubmitTransaction(context.Context, string) (*proto.TXResponse, error) +} diff --git a/cmd/soroban-rpc/internal/daemon/interfaces/noOpDaemon.go b/cmd/soroban-rpc/internal/daemon/interfaces/noOpDaemon.go new file mode 100644 index 00000000..e73689a5 --- /dev/null +++ b/cmd/soroban-rpc/internal/daemon/interfaces/noOpDaemon.go @@ -0,0 +1,46 @@ +package interfaces + +import ( + "context" + + "github.com/prometheus/client_golang/prometheus" + proto "github.com/stellar/go/protocols/stellarcore" +) + +// The noOpDeamon is a dummy daemon implementation, supporting the Daemon interface. +// Used only in testing. +type noOpDaemon struct { + metricsRegistry *prometheus.Registry + metricsNamespace string + coreClient noOpCoreClient +} + +func MakeNoOpDeamon() *noOpDaemon { + return &noOpDaemon{ + metricsRegistry: prometheus.NewRegistry(), + metricsNamespace: "soroban_rpc", + coreClient: noOpCoreClient{}, + } +} + +func (d *noOpDaemon) MetricsRegistry() *prometheus.Registry { + return d.metricsRegistry +} + +func (d *noOpDaemon) MetricsNamespace() string { + return d.metricsNamespace +} + +func (d *noOpDaemon) CoreClient() CoreClient { + return d.coreClient +} + +type noOpCoreClient struct{} + +func (s noOpCoreClient) Info(context.Context) (*proto.InfoResponse, error) { + return &proto.InfoResponse{}, nil +} + +func (s noOpCoreClient) SubmitTransaction(context.Context, string) (*proto.TXResponse, error) { + return &proto.TXResponse{Status: proto.PreflightStatusOk}, nil +} diff --git a/cmd/soroban-rpc/internal/daemon/metrics.go b/cmd/soroban-rpc/internal/daemon/metrics.go new file mode 100644 index 00000000..c7c44484 --- /dev/null +++ b/cmd/soroban-rpc/internal/daemon/metrics.go @@ -0,0 +1,104 @@ +package daemon + +import ( + "context" + "runtime" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stellar/go/clients/stellarcore" + proto "github.com/stellar/go/protocols/stellarcore" + "github.com/stellar/go/support/logmetrics" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" +) + +func (d *Daemon) registerMetrics() { + buildInfoGauge := prometheus.NewGaugeVec( + prometheus.GaugeOpts{Namespace: prometheusNamespace, Subsystem: "build", Name: "info"}, + []string{"version", "goversion", "commit", "branch", "build_timestamp"}, + ) + // LogMetricsHook is a metric which counts log lines emitted by soroban rpc + LogMetricsHook := logmetrics.New(prometheusNamespace) + // + 
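+ // The gauge value itself is not meaningful; it is incremented once so that the build metadata is exposed through its labels.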
buildInfoGauge.With(prometheus.Labels{ + "version": config.Version, + "commit": config.CommitHash, + "branch": config.Branch, + "build_timestamp": config.BuildTimestamp, + "goversion": runtime.Version(), + }).Inc() + + d.metricsRegistry.MustRegister(prometheus.NewGoCollector()) + d.metricsRegistry.MustRegister(prometheus.NewProcessCollector(prometheus.ProcessCollectorOpts{})) + d.metricsRegistry.MustRegister(buildInfoGauge) + + for _, counter := range LogMetricsHook { + d.metricsRegistry.MustRegister(counter) + } +} + +func (d *Daemon) MetricsRegistry() *prometheus.Registry { + return d.metricsRegistry +} + +func (d *Daemon) MetricsNamespace() string { + return prometheusNamespace +} + +type CoreClientWithMetrics struct { + stellarcore.Client + submitMetric *prometheus.SummaryVec + opCountMetric *prometheus.SummaryVec +} + +func newCoreClientWithMetrics(client stellarcore.Client, registry *prometheus.Registry) *CoreClientWithMetrics { + submitMetric := prometheus.NewSummaryVec(prometheus.SummaryOpts{ + Namespace: prometheusNamespace, Subsystem: "txsub", Name: "submission_duration_seconds", + Help: "submission durations to Stellar-Core, sliding window = 10m", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }, []string{"status"}) + opCountMetric := prometheus.NewSummaryVec(prometheus.SummaryOpts{ + Namespace: prometheusNamespace, Subsystem: "txsub", Name: "operation_count", + Help: "number of operations included in a transaction, sliding window = 10m", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }, []string{"status"}) + registry.MustRegister(submitMetric, opCountMetric) + + return &CoreClientWithMetrics{ + Client: client, + submitMetric: submitMetric, + opCountMetric: opCountMetric, + } +} + +func (c *CoreClientWithMetrics) SubmitTransaction(ctx context.Context, envelopeBase64 string) (*proto.TXResponse, error) { + var envelope xdr.TransactionEnvelope + err := xdr.SafeUnmarshalBase64(envelopeBase64, &envelope) + if err != nil { + return nil, err + } + + startTime := time.Now() + response, err := c.Client.SubmitTransaction(ctx, envelopeBase64) + duration := time.Since(startTime).Seconds() + + var label prometheus.Labels + if err != nil { + label = prometheus.Labels{"status": "request_error"} + } else if response.IsException() { + label = prometheus.Labels{"status": "exception"} + } else { + label = prometheus.Labels{"status": response.Status} + } + + c.submitMetric.With(label).Observe(duration) + c.opCountMetric.With(label).Observe(float64(len(envelope.Operations()))) + return response, err +} + +func (d *Daemon) CoreClient() interfaces.CoreClient { + return d.coreClient +} diff --git a/cmd/soroban-rpc/internal/db/db.go b/cmd/soroban-rpc/internal/db/db.go new file mode 100644 index 00000000..428f29fe --- /dev/null +++ b/cmd/soroban-rpc/internal/db/db.go @@ -0,0 +1,267 @@ +package db + +import ( + "context" + "database/sql" + "embed" + "fmt" + "strconv" + "sync" + + sq "github.com/Masterminds/squirrel" + _ "github.com/mattn/go-sqlite3" + "github.com/prometheus/client_golang/prometheus" + migrate "github.com/rubenv/sql-migrate" + + "github.com/stellar/go/support/db" + "github.com/stellar/go/support/errors" + "github.com/stellar/go/xdr" +) + +//go:embed migrations/*.sql +var migrations embed.FS + +var ErrEmptyDB = errors.New("DB is empty") + +const ( + metaTableName = "metadata" + latestLedgerSequenceMetaKey = "LatestLedgerSequence" +) + +type ReadWriter interface { + NewTx(ctx context.Context) (WriteTx, error) + GetLatestLedgerSequence(ctx 
context.Context) (uint32, error) +} + +type WriteTx interface { + LedgerEntryWriter() LedgerEntryWriter + LedgerWriter() LedgerWriter + Commit(ledgerSeq uint32) error + Rollback() error +} + +type dbCache struct { + latestLedgerSeq uint32 + ledgerEntries transactionalCache // Just like the DB: compress-encoded ledger key -> ledger entry XDR + sync.RWMutex +} + +type DB struct { + db.SessionInterface + cache dbCache +} + +func openSQLiteDB(dbFilePath string) (*db.Session, error) { + // 1. Use Write-Ahead Logging (WAL). + // 2. Disable WAL auto-checkpointing (we will do the checkpointing ourselves with wal_checkpoint pragmas + // after every write transaction). + // 3. Use synchronous=NORMAL, which is faster and still safe in WAL mode. + session, err := db.Open("sqlite3", fmt.Sprintf("file:%s?_journal_mode=WAL&_wal_autocheckpoint=0&_synchronous=NORMAL", dbFilePath)) + if err != nil { + return nil, errors.Wrap(err, "open failed") + } + + if err = runMigrations(session.DB.DB, "sqlite3"); err != nil { + _ = session.Close() + return nil, errors.Wrap(err, "could not run migrations") + } + return session, nil +} + +func OpenSQLiteDBWithPrometheusMetrics(dbFilePath string, namespace string, sub db.Subservice, registry *prometheus.Registry) (*DB, error) { + session, err := openSQLiteDB(dbFilePath) + if err != nil { + return nil, err + } + result := DB{ + SessionInterface: db.RegisterMetrics(session, namespace, sub, registry), + cache: dbCache{ + ledgerEntries: newTransactionalCache(), + }, + } + return &result, nil +} + +func OpenSQLiteDB(dbFilePath string) (*DB, error) { + session, err := openSQLiteDB(dbFilePath) + if err != nil { + return nil, err + } + result := DB{ + SessionInterface: session, + cache: dbCache{ + ledgerEntries: newTransactionalCache(), + }, + } + return &result, nil +} + +func getLatestLedgerSequence(ctx context.Context, q db.SessionInterface, cache *dbCache) (uint32, error) { + sql := sq.Select("value").From(metaTableName).Where(sq.Eq{"key": latestLedgerSequenceMetaKey}) + var results []string + if err := q.Select(ctx, &results, sql); err != nil { + return 0, err + } + switch len(results) { + case 0: + return 0, ErrEmptyDB + case 1: + // expected length on an initialized DB + default: + return 0, fmt.Errorf("multiple entries (%d) for key %q in table %q", len(results), latestLedgerSequenceMetaKey, metaTableName) + } + latestLedgerStr := results[0] + latestLedger, err := strconv.ParseUint(latestLedgerStr, 10, 32) + if err != nil { + return 0, err + } + result := uint32(latestLedger) + + // Add missing ledger sequence to the top cache. + // Otherwise, the write-through cache won't get updated until the first ingestion commit + cache.Lock() + if cache.latestLedgerSeq == 0 { + // Only update the cache if value is missing (0), otherwise + // we may end up overwriting the entry with an older version + cache.latestLedgerSeq = result + } + cache.Unlock() + + return result, nil +} + +type readWriter struct { + db *DB + maxBatchSize int + ledgerRetentionWindow uint32 +} + +// NewReadWriter constructs a new ReadWriter instance and configures +// the size of ledger entry batches when writing ledger entries +// and the retention window for how many historical ledgers are +// recorded in the database. 
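+// For example (illustrative values): NewReadWriter(dbConn, 150, 17280) batches up to 150 ledger entry writes per flush and retains roughly 24 hours of ledgers.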
+func NewReadWriter(db *DB, maxBatchSize int, ledgerRetentionWindow uint32) ReadWriter { + return &readWriter{ + db: db, + maxBatchSize: maxBatchSize, + ledgerRetentionWindow: ledgerRetentionWindow, + } +} + +func (rw *readWriter) GetLatestLedgerSequence(ctx context.Context) (uint32, error) { + return getLatestLedgerSequence(ctx, rw.db, &rw.db.cache) +} + +func (rw *readWriter) NewTx(ctx context.Context) (WriteTx, error) { + txSession := rw.db.Clone() + if err := txSession.Begin(ctx); err != nil { + return nil, err + } + stmtCache := sq.NewStmtCache(txSession.GetTx()) + db := rw.db + return writeTx{ + globalCache: &db.cache, + postCommit: func() error { + _, err := db.ExecRaw(ctx, "PRAGMA wal_checkpoint(TRUNCATE)") + return err + }, + tx: txSession, + stmtCache: stmtCache, + ledgerWriter: ledgerWriter{stmtCache: stmtCache}, + ledgerEntryWriter: ledgerEntryWriter{ + stmtCache: stmtCache, + buffer: xdr.NewEncodingBuffer(), + keyToEntryBatch: make(map[string]*xdr.LedgerEntry, rw.maxBatchSize), + ledgerEntryCacheWriteTx: db.cache.ledgerEntries.newWriteTx(rw.maxBatchSize), + maxBatchSize: rw.maxBatchSize, + }, + ledgerRetentionWindow: rw.ledgerRetentionWindow, + }, nil +} + +type writeTx struct { + globalCache *dbCache + postCommit func() error + tx db.SessionInterface + stmtCache *sq.StmtCache + ledgerEntryWriter ledgerEntryWriter + ledgerWriter ledgerWriter + ledgerRetentionWindow uint32 +} + +func (w writeTx) LedgerEntryWriter() LedgerEntryWriter { + return w.ledgerEntryWriter +} + +func (w writeTx) LedgerWriter() LedgerWriter { + return w.ledgerWriter +} + +func (w writeTx) Commit(ledgerSeq uint32) error { + if err := w.ledgerEntryWriter.flush(); err != nil { + return err + } + + if err := w.ledgerWriter.trimLedgers(ledgerSeq, w.ledgerRetentionWindow); err != nil { + return err + } + + _, err := sq.Replace(metaTableName).RunWith(w.stmtCache). + Values(latestLedgerSequenceMetaKey, fmt.Sprintf("%d", ledgerSeq)).Exec() + if err != nil { + return err + } + + // We need to make the cache update atomic with the transaction commit. + // Otherwise, the cache can be made inconsistent if a write transaction finishes + // in between, updating the cache in the wrong order. + commitAndUpdateCache := func() error { + w.globalCache.Lock() + defer w.globalCache.Unlock() + if err = w.tx.Commit(); err != nil { + return err + } + w.globalCache.latestLedgerSeq = ledgerSeq + w.ledgerEntryWriter.ledgerEntryCacheWriteTx.commit() + return nil + } + if err := commitAndUpdateCache(); err != nil { + return err + } + + return w.postCommit() +} + +func (w writeTx) Rollback() error { + // errors.New("not in transaction") is returned when rolling back a transaction which has + // already been committed or rolled back. We can ignore those errors + // because we allow rolling back after commits in defer statements. 
+ if err := w.tx.Rollback(); err == nil || err.Error() == "not in transaction" { + return nil + } else { + return err + } +} + +func runMigrations(db *sql.DB, dialect string) error { + m := &migrate.AssetMigrationSource{ + Asset: migrations.ReadFile, + AssetDir: func() func(string) ([]string, error) { + return func(path string) ([]string, error) { + dirEntry, err := migrations.ReadDir(path) + if err != nil { + return nil, err + } + entries := make([]string, 0) + for _, e := range dirEntry { + entries = append(entries, e.Name()) + } + + return entries, nil + } + }(), + Dir: "migrations", + } + _, err := migrate.ExecMax(db, dialect, m, migrate.Up, 0) + return err +} diff --git a/cmd/soroban-rpc/internal/db/ledger.go b/cmd/soroban-rpc/internal/db/ledger.go new file mode 100644 index 00000000..1b4b0aa2 --- /dev/null +++ b/cmd/soroban-rpc/internal/db/ledger.go @@ -0,0 +1,94 @@ +package db + +import ( + "context" + "fmt" + + sq "github.com/Masterminds/squirrel" + + "github.com/stellar/go/xdr" +) + +const ( + ledgerCloseMetaTableName = "ledger_close_meta" +) + +type StreamLedgerFn func(xdr.LedgerCloseMeta) error + +type LedgerReader interface { + GetLedger(ctx context.Context, sequence uint32) (xdr.LedgerCloseMeta, bool, error) + StreamAllLedgers(ctx context.Context, f StreamLedgerFn) error +} + +type LedgerWriter interface { + InsertLedger(ledger xdr.LedgerCloseMeta) error +} + +type ledgerReader struct { + db *DB +} + +func NewLedgerReader(db *DB) LedgerReader { + return ledgerReader{db: db} +} + +// StreamAllLedgers runs f over all the ledgers in the database (until f errors or signals it's done). +func (r ledgerReader) StreamAllLedgers(ctx context.Context, f StreamLedgerFn) error { + sql := sq.Select("meta").From(ledgerCloseMetaTableName).OrderBy("sequence asc") + q, err := r.db.Query(ctx, sql) + if err != nil { + return err + } + defer q.Close() + for q.Next() { + var closeMeta xdr.LedgerCloseMeta + if err = q.Scan(&closeMeta); err != nil { + return err + } + if err = f(closeMeta); err != nil { + return err + } + } + return nil +} + +// GetLedger fetches a single ledger from the db. +func (r ledgerReader) GetLedger(ctx context.Context, sequence uint32) (xdr.LedgerCloseMeta, bool, error) { + sql := sq.Select("meta").From(ledgerCloseMetaTableName).Where(sq.Eq{"sequence": sequence}) + var results []xdr.LedgerCloseMeta + if err := r.db.Select(ctx, &results, sql); err != nil { + return xdr.LedgerCloseMeta{}, false, err + } + switch len(results) { + case 0: + return xdr.LedgerCloseMeta{}, false, nil + case 1: + return results[0], true, nil + default: + return xdr.LedgerCloseMeta{}, false, fmt.Errorf("multiple lcm entries (%d) for sequence %d in table %q", len(results), sequence, ledgerCloseMetaTableName) + } +} + +type ledgerWriter struct { + stmtCache *sq.StmtCache +} + +// trimLedgers removes all ledgers which fall outside the retention window. +func (l ledgerWriter) trimLedgers(latestLedgerSeq uint32, retentionWindow uint32) error { + if latestLedgerSeq+1 <= retentionWindow { + return nil + } + cutoff := latestLedgerSeq + 1 - retentionWindow + deleteSQL := sq.StatementBuilder.RunWith(l.stmtCache).Delete(ledgerCloseMetaTableName).Where(sq.Lt{"sequence": cutoff}) + _, err := deleteSQL.Exec() + return err +} + +// InsertLedger inserts a ledger in the db. +func (l ledgerWriter) InsertLedger(ledger xdr.LedgerCloseMeta) error { + _, err := sq.StatementBuilder.RunWith(l.stmtCache). + Insert(ledgerCloseMetaTableName). + Values(ledger.LedgerSequence(), ledger). 
+ Exec() + return err +} diff --git a/cmd/soroban-rpc/internal/db/ledger_test.go b/cmd/soroban-rpc/internal/db/ledger_test.go new file mode 100644 index 00000000..bbbfdbee --- /dev/null +++ b/cmd/soroban-rpc/internal/db/ledger_test.go @@ -0,0 +1,95 @@ +package db + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/stellar/go/xdr" +) + +func createLedger(ledgerSequence uint32) xdr.LedgerCloseMeta { + return xdr.LedgerCloseMeta{ + V: 1, + V1: &xdr.LedgerCloseMetaV1{ + LedgerHeader: xdr.LedgerHeaderHistoryEntry{ + Hash: xdr.Hash{}, + Header: xdr.LedgerHeader{ + LedgerSeq: xdr.Uint32(ledgerSequence), + }, + }, + TxSet: xdr.GeneralizedTransactionSet{ + V: 1, + V1TxSet: &xdr.TransactionSetV1{}, + }, + }, + } +} + +func assertLedgerRange(t *testing.T, reader LedgerReader, start, end uint32) { + var allLedgers []xdr.LedgerCloseMeta + err := reader.StreamAllLedgers(context.Background(), func(txmeta xdr.LedgerCloseMeta) error { + allLedgers = append(allLedgers, txmeta) + return nil + }) + assert.NoError(t, err) + for i := start - 1; i <= end+1; i++ { + ledger, exists, err := reader.GetLedger(context.Background(), i) + assert.NoError(t, err) + if i < start || i > end { + assert.False(t, exists) + continue + } + assert.True(t, exists) + ledgerBinary, err := ledger.MarshalBinary() + assert.NoError(t, err) + expected := createLedger(i) + expectedBinary, err := expected.MarshalBinary() + assert.NoError(t, err) + assert.Equal(t, expectedBinary, ledgerBinary) + + ledgerBinary, err = allLedgers[0].MarshalBinary() + assert.NoError(t, err) + assert.Equal(t, expectedBinary, ledgerBinary) + allLedgers = allLedgers[1:] + } + assert.Empty(t, allLedgers) +} + +func TestLedgers(t *testing.T) { + db := NewTestDB(t) + + reader := NewLedgerReader(db) + _, exists, err := reader.GetLedger(context.Background(), 1) + assert.NoError(t, err) + assert.False(t, exists) + + for i := 1; i <= 10; i++ { + ledgerSequence := uint32(i) + tx, err := NewReadWriter(db, 150, 15).NewTx(context.Background()) + assert.NoError(t, err) + assert.NoError(t, tx.LedgerWriter().InsertLedger(createLedger(ledgerSequence))) + assert.NoError(t, tx.Commit(ledgerSequence)) + // rolling back after a commit is a no-op + assert.NoError(t, tx.Rollback()) + } + + assertLedgerRange(t, reader, 1, 10) + + ledgerSequence := uint32(11) + tx, err := NewReadWriter(db, 150, 15).NewTx(context.Background()) + assert.NoError(t, err) + assert.NoError(t, tx.LedgerWriter().InsertLedger(createLedger(ledgerSequence))) + assert.NoError(t, tx.Commit(ledgerSequence)) + + assertLedgerRange(t, reader, 1, 11) + + ledgerSequence = uint32(12) + tx, err = NewReadWriter(db, 150, 5).NewTx(context.Background()) + assert.NoError(t, err) + assert.NoError(t, tx.LedgerWriter().InsertLedger(createLedger(ledgerSequence))) + assert.NoError(t, tx.Commit(ledgerSequence)) + + assertLedgerRange(t, reader, 8, 12) +} diff --git a/cmd/soroban-rpc/internal/db/ledgerentry.go b/cmd/soroban-rpc/internal/db/ledgerentry.go new file mode 100644 index 00000000..1553ecf5 --- /dev/null +++ b/cmd/soroban-rpc/internal/db/ledgerentry.go @@ -0,0 +1,394 @@ +package db + +import ( + "context" + "crypto/sha256" + "database/sql" + "fmt" + + sq "github.com/Masterminds/squirrel" + + "github.com/stellar/go/support/db" + "github.com/stellar/go/support/errors" + "github.com/stellar/go/xdr" +) + +const ( + ledgerEntriesTableName = "ledger_entries" +) + +type LedgerEntryReader interface { + GetLatestLedgerSequence(ctx context.Context) (uint32, error) + NewTx(ctx context.Context) 
(LedgerEntryReadTx, error) + NewCachedTx(ctx context.Context) (LedgerEntryReadTx, error) +} + +type LedgerKeyAndEntry struct { + Key xdr.LedgerKey + Entry xdr.LedgerEntry + LiveUntilLedgerSeq *uint32 // optional live-until ledger seq, when applicable. +} + +type LedgerEntryReadTx interface { + GetLatestLedgerSequence() (uint32, error) + GetLedgerEntries(keys ...xdr.LedgerKey) ([]LedgerKeyAndEntry, error) + Done() error +} + +type LedgerEntryWriter interface { + UpsertLedgerEntry(entry xdr.LedgerEntry) error + DeleteLedgerEntry(key xdr.LedgerKey) error +} + +type ledgerEntryWriter struct { + stmtCache *sq.StmtCache + buffer *xdr.EncodingBuffer + // nil entries imply deletion + keyToEntryBatch map[string]*xdr.LedgerEntry + ledgerEntryCacheWriteTx transactionalCacheWriteTx + maxBatchSize int +} + +func (l ledgerEntryWriter) UpsertLedgerEntry(entry xdr.LedgerEntry) error { + // We can do a little extra validation to ensure the entry and key match, + // because the key can be derived from the entry. + key, err := entry.LedgerKey() + if err != nil { + return errors.Wrap(err, "could not get ledger key from entry") + } + + encodedKey, err := encodeLedgerKey(l.buffer, key) + if err != nil { + return err + } + + l.keyToEntryBatch[encodedKey] = &entry + return l.maybeFlush() +} + +func (l ledgerEntryWriter) DeleteLedgerEntry(key xdr.LedgerKey) error { + encodedKey, err := encodeLedgerKey(l.buffer, key) + if err != nil { + return err + } + l.keyToEntryBatch[encodedKey] = nil + return l.maybeFlush() +} + +func (l ledgerEntryWriter) maybeFlush() error { + if len(l.keyToEntryBatch) >= l.maxBatchSize { + return l.flush() + } + return nil +} + +func (l ledgerEntryWriter) flush() error { + upsertCount := 0 + upsertSQL := sq.StatementBuilder.RunWith(l.stmtCache).Replace(ledgerEntriesTableName) + var deleteKeys = make([]string, 0, len(l.keyToEntryBatch)) + + upsertCacheUpdates := make(map[string]*string, len(l.keyToEntryBatch)) + for key, entry := range l.keyToEntryBatch { + if entry != nil { + // safe since we cast to string right away + encodedEntry, err := l.buffer.UnsafeMarshalBinary(entry) + if err != nil { + return err + } + encodedEntryStr := string(encodedEntry) + upsertSQL = upsertSQL.Values(key, encodedEntryStr) + upsertCount += 1 + // Only cache Config entries for now + if entry.Data.Type == xdr.LedgerEntryTypeConfigSetting { + upsertCacheUpdates[key] = &encodedEntryStr + } + } else { + deleteKeys = append(deleteKeys, key) + } + // Delete each entry instead of reassigning l.keyToEntryBatch + // to the empty map because the map was allocated with a + // capacity of: make(map[string]*string, rw.maxBatchSize). + // We want to reuse the hashtable buckets in subsequent + // calls to UpsertLedgerEntry / DeleteLedgerEntry. 
+ delete(l.keyToEntryBatch, key) + } + + if upsertCount > 0 { + if _, err := upsertSQL.Exec(); err != nil { + return err + } + for key, entry := range upsertCacheUpdates { + l.ledgerEntryCacheWriteTx.upsert(key, *entry) + } + } + + if len(deleteKeys) > 0 { + deleteSQL := sq.StatementBuilder.RunWith(l.stmtCache).Delete(ledgerEntriesTableName).Where(sq.Eq{"key": deleteKeys}) + if _, err := deleteSQL.Exec(); err != nil { + return err + } + for _, key := range deleteKeys { + l.ledgerEntryCacheWriteTx.delete(key) + } + } + + return nil +} + +type ledgerEntryReadTx struct { + globalCache *dbCache + stmtCache *sq.StmtCache + latestLedgerSeqCache uint32 + ledgerEntryCacheReadTx *transactionalCacheReadTx + tx db.SessionInterface + buffer *xdr.EncodingBuffer +} + +func (l *ledgerEntryReadTx) GetLatestLedgerSequence() (uint32, error) { + if l.latestLedgerSeqCache != 0 { + return l.latestLedgerSeqCache, nil + } + latestLedgerSeq, err := getLatestLedgerSequence(context.Background(), l.tx, l.globalCache) + if err == nil { + l.latestLedgerSeqCache = latestLedgerSeq + } + return latestLedgerSeq, err +} + +// From compressed XDR keys to XDR entries (i.e. using the DB's representation) +func (l *ledgerEntryReadTx) getRawLedgerEntries(keys ...string) (map[string]string, error) { + result := make(map[string]string, len(keys)) + keysToQueryInDB := keys + if l.ledgerEntryCacheReadTx != nil { + keysToQueryInDB = make([]string, 0, len(keys)) + for _, k := range keys { + entry, ok := l.ledgerEntryCacheReadTx.get(k) + if !ok { + keysToQueryInDB = append(keysToQueryInDB, k) + } + if entry != nil { + result[k] = *entry + } + } + } + + if len(keysToQueryInDB) == 0 { + return result, nil + } + + builder := sq.StatementBuilder + if l.stmtCache != nil { + builder = builder.RunWith(l.stmtCache) + } else { + builder = builder.RunWith(l.tx.GetTx()) + } + sql := builder.Select("key", "entry").From(ledgerEntriesTableName).Where(sq.Eq{"key": keysToQueryInDB}) + q, err := sql.Query() + if err != nil { + return nil, err + } + defer q.Close() + for q.Next() { + var key, entry string + if err = q.Scan(&key, &entry); err != nil { + return nil, err + } + result[key] = entry + if l.ledgerEntryCacheReadTx != nil { + l.ledgerEntryCacheReadTx.upsert(key, &entry) + + // Add missing config setting entries to the top cache. 
+ // Otherwise, the write-through cache won't get updated on restarts + // (after which we don't process past config setting updates) + keyType, err := xdr.GetBinaryCompressedLedgerKeyType([]byte(key)) + if err != nil { + return nil, err + } + if keyType == xdr.LedgerEntryTypeConfigSetting { + l.globalCache.Lock() + // Only update the cache if the entry is missing, otherwise + // we may end up overwriting the entry with an older version + if _, ok := l.globalCache.ledgerEntries.entries[key]; !ok { + l.globalCache.ledgerEntries.entries[key] = entry + } + defer l.globalCache.Unlock() + } + } + } + return result, nil +} + +func GetLedgerEntry(tx LedgerEntryReadTx, key xdr.LedgerKey) (bool, xdr.LedgerEntry, *uint32, error) { + keyEntries, err := tx.GetLedgerEntries(key) + if err != nil { + return false, xdr.LedgerEntry{}, nil, err + } + switch len(keyEntries) { + case 0: + return false, xdr.LedgerEntry{}, nil, nil + case 1: + // expected length + return true, keyEntries[0].Entry, keyEntries[0].LiveUntilLedgerSeq, nil + default: + return false, xdr.LedgerEntry{}, nil, fmt.Errorf("multiple entries (%d) for key %v", len(keyEntries), key) + } +} + +// hasTTLKey check to see if the key type is expected to be accompanied by a LedgerTTLEntry +func hasTTLKey(key xdr.LedgerKey) bool { + switch key.Type { + case xdr.LedgerEntryTypeContractData: + return true + case xdr.LedgerEntryTypeContractCode: + return true + default: + } + return false +} + +func entryKeyToTTLEntryKey(key xdr.LedgerKey) (xdr.LedgerKey, error) { + buf, err := key.MarshalBinary() + if err != nil { + return xdr.LedgerKey{}, err + } + var ttlEntry xdr.LedgerKey + err = ttlEntry.SetTtl(sha256.Sum256(buf)) + if err != nil { + return xdr.LedgerKey{}, err + } + return ttlEntry, nil +} + +func (l *ledgerEntryReadTx) GetLedgerEntries(keys ...xdr.LedgerKey) ([]LedgerKeyAndEntry, error) { + encodedKeys := make([]string, 0, 2*len(keys)) + type keyToEncoded struct { + key xdr.LedgerKey + encodedKey string + encodedTTLKey *string + } + keysToEncoded := make([]keyToEncoded, len(keys)) + for i, k := range keys { + k2 := k + keysToEncoded[i].key = k2 + encodedKey, err := encodeLedgerKey(l.buffer, k) + if err != nil { + return nil, err + } + keysToEncoded[i].encodedKey = encodedKey + encodedKeys = append(encodedKeys, encodedKey) + if !hasTTLKey(k) { + continue + } + ttlEntryKey, err := entryKeyToTTLEntryKey(k) + if err != nil { + return nil, err + } + encodedTTLKey, err := encodeLedgerKey(l.buffer, ttlEntryKey) + if err != nil { + return nil, err + } + keysToEncoded[i].encodedTTLKey = &encodedTTLKey + encodedKeys = append(encodedKeys, encodedTTLKey) + } + + rawResult, err := l.getRawLedgerEntries(encodedKeys...) + if err != nil { + return nil, err + } + + result := make([]LedgerKeyAndEntry, 0, len(keys)) + for _, k2e := range keysToEncoded { + encodedEntry, ok := rawResult[k2e.encodedKey] + if !ok { + continue + } + var entry xdr.LedgerEntry + if err := xdr.SafeUnmarshal([]byte(encodedEntry), &entry); err != nil { + return nil, errors.Wrap(err, "cannot decode ledger entry from DB") + } + if k2e.encodedTTLKey == nil { + result = append(result, LedgerKeyAndEntry{k2e.key, entry, nil}) + continue + } + encodedTTLEntry, ok := rawResult[*k2e.encodedTTLKey] + if !ok { + // missing ttl key. This should not happen. 
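+ // (Contract data and contract code entries are expected to be written together with their companion TTL entries, so finding the data entry without its TTL entry indicates an inconsistency.)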
+ return nil, errors.New("missing ttl key entry") + } + var ttlEntry xdr.LedgerEntry + if err := xdr.SafeUnmarshal([]byte(encodedTTLEntry), &ttlEntry); err != nil { + return nil, errors.Wrap(err, "cannot decode TTL ledger entry from DB") + } + liveUntilSeq := uint32(ttlEntry.Data.Ttl.LiveUntilLedgerSeq) + result = append(result, LedgerKeyAndEntry{k2e.key, entry, &liveUntilSeq}) + } + + return result, nil +} + +func (l ledgerEntryReadTx) Done() error { + // Since it's a read-only transaction, we don't + // care whether we commit it or roll it back as long as we close it + return l.tx.Rollback() +} + +type ledgerEntryReader struct { + db *DB +} + +func NewLedgerEntryReader(db *DB) LedgerEntryReader { + return ledgerEntryReader{db: db} +} + +func (r ledgerEntryReader) GetLatestLedgerSequence(ctx context.Context) (uint32, error) { + return getLatestLedgerSequence(ctx, r.db, &r.db.cache) +} + +// NewCachedTx() caches all accessed ledger entries and select statements. If many ledger entries are accessed, it will grow without bounds. +func (r ledgerEntryReader) NewCachedTx(ctx context.Context) (LedgerEntryReadTx, error) { + txSession := r.db.Clone() + // We need to copy the cached ledger entries locally when we start the transaction + // since otherwise we would break the consistency between the transaction and the cache. + + // We need to make the parent cache access atomic with the read transaction creation. + // Otherwise, the cache can be made inconsistent if a write transaction finishes + // in between, updating the cache. + r.db.cache.RLock() + defer r.db.cache.RUnlock() + if err := txSession.BeginTx(ctx, &sql.TxOptions{ReadOnly: true}); err != nil { + return nil, err + } + cacheReadTx := r.db.cache.ledgerEntries.newReadTx() + return &ledgerEntryReadTx{ + globalCache: &r.db.cache, + stmtCache: sq.NewStmtCache(txSession.GetTx()), + latestLedgerSeqCache: r.db.cache.latestLedgerSeq, + ledgerEntryCacheReadTx: &cacheReadTx, + tx: txSession, + buffer: xdr.NewEncodingBuffer(), + }, nil +} + +func (r ledgerEntryReader) NewTx(ctx context.Context) (LedgerEntryReadTx, error) { + txSession := r.db.Clone() + if err := txSession.BeginTx(ctx, &sql.TxOptions{ReadOnly: true}); err != nil { + return nil, err + } + r.db.cache.RLock() + defer r.db.cache.RUnlock() + return &ledgerEntryReadTx{ + globalCache: &r.db.cache, + latestLedgerSeqCache: r.db.cache.latestLedgerSeq, + tx: txSession, + buffer: xdr.NewEncodingBuffer(), + }, nil +} + +func encodeLedgerKey(buffer *xdr.EncodingBuffer, key xdr.LedgerKey) (string, error) { + // this is safe since we are converting to string right away, which causes a copy + binKey, err := buffer.LedgerKeyUnsafeMarshalBinaryCompress(key) + if err != nil { + return "", err + } + return string(binKey), nil +} diff --git a/cmd/soroban-rpc/internal/db/ledgerentry_test.go b/cmd/soroban-rpc/internal/db/ledgerentry_test.go new file mode 100644 index 00000000..2e6b0012 --- /dev/null +++ b/cmd/soroban-rpc/internal/db/ledgerentry_test.go @@ -0,0 +1,584 @@ +package db + +import ( + "context" + "fmt" + "math/rand" + "path" + "sync" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/stellar/go/xdr" +) + +func getLedgerEntryAndLatestLedgerSequenceWithErr(db *DB, key xdr.LedgerKey) (bool, xdr.LedgerEntry, uint32, *uint32, error) { + tx, err := NewLedgerEntryReader(db).NewTx(context.Background()) + if err != nil { + return false, xdr.LedgerEntry{}, 0, nil, err + } + var doneErr error + defer func() { + doneErr = tx.Done() + }() + 
+ latestSeq, err := tx.GetLatestLedgerSequence() + if err != nil { + return false, xdr.LedgerEntry{}, 0, nil, err + } + + present, entry, expSeq, err := GetLedgerEntry(tx, key) + if err != nil { + return false, xdr.LedgerEntry{}, 0, nil, err + } + + return present, entry, latestSeq, expSeq, doneErr +} + +func getLedgerEntryAndLatestLedgerSequence(t require.TestingT, db *DB, key xdr.LedgerKey) (bool, xdr.LedgerEntry, uint32, *uint32) { + present, entry, latestSeq, expSeq, err := getLedgerEntryAndLatestLedgerSequenceWithErr(db, key) + require.NoError(t, err) + return present, entry, latestSeq, expSeq +} + +func TestGoldenPath(t *testing.T) { + db := NewTestDB(t) + // Check that we get an empty DB error + _, err := NewLedgerEntryReader(db).GetLatestLedgerSequence(context.Background()) + assert.Equal(t, ErrEmptyDB, err) + + tx, err := NewReadWriter(db, 150, 15).NewTx(context.Background()) + assert.NoError(t, err) + writer := tx.LedgerEntryWriter() + + // Fill the DB with a single entry and fetch it + four := xdr.Uint32(4) + six := xdr.Uint32(6) + data := xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &xdr.Hash{0xca, 0xfe}, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &four, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &six, + }, + } + key, entry := getContractDataLedgerEntry(t, data) + assert.NoError(t, writer.UpsertLedgerEntry(entry)) + + expLedgerKey, err := entryKeyToTTLEntryKey(key) + assert.NoError(t, err) + expLegerEntry := getTTLLedgerEntry(expLedgerKey) + assert.NoError(t, writer.UpsertLedgerEntry(expLegerEntry)) + + ledgerSequence := uint32(23) + assert.NoError(t, tx.Commit(ledgerSequence)) + + present, obtainedEntry, obtainedLedgerSequence, liveUntilSeq := getLedgerEntryAndLatestLedgerSequence(t, db, key) + assert.True(t, present) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) + require.NotNil(t, liveUntilSeq) + assert.Equal(t, uint32(expLegerEntry.Data.Ttl.LiveUntilLedgerSeq), *liveUntilSeq) + assert.Equal(t, obtainedEntry.Data.Type, xdr.LedgerEntryTypeContractData) + assert.Equal(t, xdr.Hash{0xca, 0xfe}, *obtainedEntry.Data.ContractData.Contract.ContractId) + assert.Equal(t, six, *obtainedEntry.Data.ContractData.Val.U32) + + obtainedLedgerSequence, err = NewLedgerEntryReader(db).GetLatestLedgerSequence(context.Background()) + assert.NoError(t, err) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) + + // Do another round, overwriting the ledger entry + tx, err = NewReadWriter(db, 150, 15).NewTx(context.Background()) + assert.NoError(t, err) + writer = tx.LedgerEntryWriter() + eight := xdr.Uint32(8) + entry.Data.ContractData.Val.U32 = &eight + + assert.NoError(t, writer.UpsertLedgerEntry(entry)) + + ledgerSequence = uint32(24) + assert.NoError(t, tx.Commit(ledgerSequence)) + + present, obtainedEntry, obtainedLedgerSequence, liveUntilSeq = getLedgerEntryAndLatestLedgerSequence(t, db, key) + assert.True(t, present) + require.NotNil(t, liveUntilSeq) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) + assert.Equal(t, eight, *obtainedEntry.Data.ContractData.Val.U32) + + // Do another round, deleting the ledger entry + tx, err = NewReadWriter(db, 150, 15).NewTx(context.Background()) + assert.NoError(t, err) + writer = tx.LedgerEntryWriter() + assert.NoError(t, err) + + assert.NoError(t, writer.DeleteLedgerEntry(key)) + ledgerSequence = uint32(25) + assert.NoError(t, tx.Commit(ledgerSequence)) + + present, _, 
obtainedLedgerSequence, liveUntilSeq = getLedgerEntryAndLatestLedgerSequence(t, db, key) + assert.False(t, present) + assert.Nil(t, liveUntilSeq) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) + + obtainedLedgerSequence, err = NewLedgerEntryReader(db).GetLatestLedgerSequence(context.Background()) + assert.NoError(t, err) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) +} + +func TestDeleteNonExistentLedgerEmpty(t *testing.T) { + db := NewTestDB(t) + + // Simulate a ledger which creates and deletes a ledger entry + // which would result in trying to delete a ledger entry which isn't there + tx, err := NewReadWriter(db, 150, 15).NewTx(context.Background()) + assert.NoError(t, err) + writer := tx.LedgerEntryWriter() + + four := xdr.Uint32(4) + six := xdr.Uint32(6) + data := xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &xdr.Hash{0xca, 0xfe}, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &four, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &six, + }, + } + key, _ := getContractDataLedgerEntry(t, data) + assert.NoError(t, writer.DeleteLedgerEntry(key)) + ledgerSequence := uint32(23) + assert.NoError(t, tx.Commit(ledgerSequence)) + + // Make sure that the ledger number was submitted + obtainedLedgerSequence, err := NewLedgerEntryReader(db).GetLatestLedgerSequence(context.Background()) + assert.NoError(t, err) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) + + // And that the entry doesn't exist + present, _, obtainedLedgerSequence, expSeq := getLedgerEntryAndLatestLedgerSequence(t, db, key) + assert.False(t, present) + require.Nil(t, expSeq) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) +} + +func getContractDataLedgerEntry(t require.TestingT, data xdr.ContractDataEntry) (xdr.LedgerKey, xdr.LedgerEntry) { + entry := xdr.LedgerEntry{ + LastModifiedLedgerSeq: 1, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &data, + }, + Ext: xdr.LedgerEntryExt{}, + } + var key xdr.LedgerKey + err := key.SetContractData(data.Contract, data.Key, data.Durability) + require.NoError(t, err) + return key, entry +} + +func getTTLLedgerEntry(key xdr.LedgerKey) xdr.LedgerEntry { + var expLegerEntry xdr.LedgerEntry + expLegerEntry.Data.Ttl = &xdr.TtlEntry{ + KeyHash: key.Ttl.KeyHash, + LiveUntilLedgerSeq: 100, + } + expLegerEntry.Data.Type = key.Type + return expLegerEntry +} + +// Make sure that (multiple, simultaneous) read transactions can happen while a write-transaction is ongoing, +// and write is only visible once the transaction is committed +func TestReadTxsDuringWriteTx(t *testing.T) { + db := NewTestDB(t) + + // Check that we get an empty DB error + _, err := NewLedgerEntryReader(db).GetLatestLedgerSequence(context.Background()) + assert.Equal(t, ErrEmptyDB, err) + + // Start filling the DB with a single entry (enforce flushing right away) + tx, err := NewReadWriter(db, 0, 15).NewTx(context.Background()) + assert.NoError(t, err) + writer := tx.LedgerEntryWriter() + + four := xdr.Uint32(4) + six := xdr.Uint32(6) + data := xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &xdr.Hash{0xca, 0xfe}, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &four, + }, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &six, + }, + } + key, entry := getContractDataLedgerEntry(t, data) + assert.NoError(t, 
writer.UpsertLedgerEntry(entry)) + + expLedgerKey, err := entryKeyToTTLEntryKey(key) + assert.NoError(t, err) + expLegerEntry := getTTLLedgerEntry(expLedgerKey) + assert.NoError(t, writer.UpsertLedgerEntry(expLegerEntry)) + + // Before committing the changes, make sure multiple concurrent transactions can query the DB + readTx1, err := NewLedgerEntryReader(db).NewTx(context.Background()) + assert.NoError(t, err) + readTx2, err := NewLedgerEntryReader(db).NewTx(context.Background()) + assert.NoError(t, err) + + _, err = readTx1.GetLatestLedgerSequence() + assert.Equal(t, ErrEmptyDB, err) + present, _, expSeq, err := GetLedgerEntry(readTx1, key) + require.Nil(t, expSeq) + assert.NoError(t, err) + assert.False(t, present) + assert.NoError(t, readTx1.Done()) + + _, err = readTx2.GetLatestLedgerSequence() + assert.Equal(t, ErrEmptyDB, err) + present, _, expSeq, err = GetLedgerEntry(readTx2, key) + assert.NoError(t, err) + assert.False(t, present) + assert.Nil(t, expSeq) + assert.NoError(t, readTx2.Done()) + + // Finish the write transaction and check that the results are present + ledgerSequence := uint32(23) + assert.NoError(t, tx.Commit(ledgerSequence)) + + obtainedLedgerSequence, err := NewLedgerEntryReader(db).GetLatestLedgerSequence(context.Background()) + assert.NoError(t, err) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) + + present, obtainedEntry, obtainedLedgerSequence, expSeq := getLedgerEntryAndLatestLedgerSequence(t, db, key) + assert.True(t, present) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) + assert.Equal(t, six, *obtainedEntry.Data.ContractData.Val.U32) + assert.NotNil(t, expSeq) +} + +// Make sure that a write transaction can happen while multiple read transactions are ongoing, +// and write is only visible once the transaction is committed +func TestWriteTxsDuringReadTxs(t *testing.T) { + db := NewTestDB(t) + + // Check that we get an empty DB error + _, err := NewLedgerEntryReader(db).GetLatestLedgerSequence(context.Background()) + assert.Equal(t, ErrEmptyDB, err) + + // Create a multiple read transactions, interleaved with the writing process + + // First read transaction, before the write transaction is created + readTx1, err := NewLedgerEntryReader(db).NewTx(context.Background()) + assert.NoError(t, err) + + // Start filling the DB with a single entry (enforce flushing right away) + tx, err := NewReadWriter(db, 0, 15).NewTx(context.Background()) + assert.NoError(t, err) + writer := tx.LedgerEntryWriter() + + // Second read transaction, after the write transaction is created + readTx2, err := NewLedgerEntryReader(db).NewTx(context.Background()) + assert.NoError(t, err) + + four := xdr.Uint32(4) + six := xdr.Uint32(6) + data := xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &xdr.Hash{0xca, 0xfe}, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &four, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &six, + }, + } + key, entry := getContractDataLedgerEntry(t, data) + assert.NoError(t, writer.UpsertLedgerEntry(entry)) + + expLedgerKey, err := entryKeyToTTLEntryKey(key) + assert.NoError(t, err) + expLegerEntry := getTTLLedgerEntry(expLedgerKey) + assert.NoError(t, writer.UpsertLedgerEntry(expLegerEntry)) + + // Third read transaction, after the first insert has happened in the write transaction + readTx3, err := NewLedgerEntryReader(db).NewTx(context.Background()) + assert.NoError(t, err) + + // Make sure that all 
the read transactions get an emptyDB error before and after the write transaction is committed + for _, readTx := range []LedgerEntryReadTx{readTx1, readTx2, readTx3} { + _, err = readTx.GetLatestLedgerSequence() + assert.Equal(t, ErrEmptyDB, err) + present, _, _, err := GetLedgerEntry(readTx, key) + assert.NoError(t, err) + assert.False(t, present) + } + + // commit the write transaction + ledgerSequence := uint32(23) + assert.NoError(t, tx.Commit(ledgerSequence)) + + for _, readTx := range []LedgerEntryReadTx{readTx1, readTx2, readTx3} { + _, err = readTx.GetLatestLedgerSequence() + assert.Equal(t, ErrEmptyDB, err) + present, _, _, err := GetLedgerEntry(readTx, key) + assert.NoError(t, err) + assert.False(t, present) + } + + // Check that the results are present in the transactions happening after the commit + + obtainedLedgerSequence, err := NewLedgerEntryReader(db).GetLatestLedgerSequence(context.Background()) + assert.NoError(t, err) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) + + present, obtainedEntry, obtainedLedgerSequence, expSeq := getLedgerEntryAndLatestLedgerSequence(t, db, key) + assert.True(t, present) + require.NotNil(t, expSeq) + assert.Equal(t, ledgerSequence, obtainedLedgerSequence) + assert.Equal(t, six, *obtainedEntry.Data.ContractData.Val.U32) + + for _, readTx := range []LedgerEntryReadTx{readTx1, readTx2, readTx3} { + assert.NoError(t, readTx.Done()) + } +} + +// Check that we can have coexisting reader and writer goroutines without deadlocks or errors +func TestConcurrentReadersAndWriter(t *testing.T) { + db := NewTestDB(t) + + contractID := xdr.Hash{0xca, 0xfe} + done := make(chan struct{}) + var wg sync.WaitGroup + logMessageCh := make(chan string, 1) + writer := func() { + defer wg.Done() + data := func(i int) xdr.ContractDataEntry { + val := xdr.Uint32(i) + return xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractID, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &val, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &val, + }, + } + } + rw := NewReadWriter(db, 10, 15) + for ledgerSequence := uint32(0); ledgerSequence < 1000; ledgerSequence++ { + tx, err := rw.NewTx(context.Background()) + assert.NoError(t, err) + writer := tx.LedgerEntryWriter() + for i := 0; i < 200; i++ { + key, entry := getContractDataLedgerEntry(t, data(i)) + assert.NoError(t, writer.UpsertLedgerEntry(entry)) + expLedgerKey, err := entryKeyToTTLEntryKey(key) + assert.NoError(t, err) + expLegerEntry := getTTLLedgerEntry(expLedgerKey) + assert.NoError(t, writer.UpsertLedgerEntry(expLegerEntry)) + } + assert.NoError(t, tx.Commit(ledgerSequence)) + logMessageCh <- fmt.Sprintf("Wrote ledger %d", ledgerSequence) + time.Sleep(time.Duration(rand.Int31n(30)) * time.Millisecond) + } + close(done) + } + reader := func(keyVal int) { + defer wg.Done() + val := xdr.Uint32(keyVal) + key := xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.LedgerKeyContractData{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractID, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &val, + }, + Durability: xdr.ContractDataDurabilityPersistent, + }, + } + for { + select { + case <-done: + return + default: + } + found, ledgerEntry, ledger, _, err := getLedgerEntryAndLatestLedgerSequenceWithErr(db, key) + if err != nil { + if err != ErrEmptyDB { + t.Fatalf("reader %d failed with error 
%v\n", keyVal, err) + } + } else { + // All entries should be found once the first write commit is done + assert.True(t, found) + logMessageCh <- fmt.Sprintf("reader %d: for ledger %d", keyVal, ledger) + assert.Equal(t, xdr.Uint32(keyVal), *ledgerEntry.Data.ContractData.Val.U32) + } + time.Sleep(time.Duration(rand.Int31n(30)) * time.Millisecond) + } + } + + // one readWriter, 32 readers + wg.Add(1) + go writer() + + for i := 1; i <= 32; i++ { + wg.Add(1) + go reader(i) + } + + workersExitCh := make(chan struct{}) + go func() { + defer close(workersExitCh) + wg.Wait() + }() + +forloop: + for { + select { + case <-workersExitCh: + break forloop + case msg := <-logMessageCh: + t.Log(msg) + } + } + +} + +func benchmarkLedgerEntry(b *testing.B, cached bool) { + db := NewTestDB(b) + keyUint32 := xdr.Uint32(0) + data := xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &xdr.Hash{0xca, 0xfe}, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &keyUint32, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &keyUint32, + }, + } + key, entry := getContractDataLedgerEntry(b, data) + tx, err := NewReadWriter(db, 150, 15).NewTx(context.Background()) + assert.NoError(b, err) + assert.NoError(b, tx.LedgerEntryWriter().UpsertLedgerEntry(entry)) + assert.NoError(b, tx.Commit(2)) + reader := NewLedgerEntryReader(db) + const numQueriesPerOp = 15 + b.ResetTimer() + b.StopTimer() + for i := 0; i < b.N; i++ { + var readTx LedgerEntryReadTx + var err error + if cached { + readTx, err = reader.NewCachedTx(context.Background()) + } else { + readTx, err = reader.NewTx(context.Background()) + } + assert.NoError(b, err) + for i := 0; i < numQueriesPerOp; i++ { + b.StartTimer() + found, _, _, err := GetLedgerEntry(readTx, key) + b.StopTimer() + assert.NoError(b, err) + assert.True(b, found) + } + assert.NoError(b, readTx.Done()) + } +} + +func BenchmarkGetLedgerEntry(b *testing.B) { + b.Run("With cache", func(b *testing.B) { benchmarkLedgerEntry(b, true) }) + b.Run("Without cache", func(b *testing.B) { benchmarkLedgerEntry(b, false) }) +} + +func BenchmarkLedgerUpdate(b *testing.B) { + db := NewTestDB(b) + keyUint32 := xdr.Uint32(0) + data := xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &xdr.Hash{0xca, 0xfe}, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &keyUint32, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &keyUint32, + }, + } + _, entry := getContractDataLedgerEntry(b, data) + const numEntriesPerOp = 3500 + b.ResetTimer() + for i := 0; i < b.N; i++ { + tx, err := NewReadWriter(db, 150, 15).NewTx(context.Background()) + assert.NoError(b, err) + writer := tx.LedgerEntryWriter() + for j := 0; j < numEntriesPerOp; j++ { + keyUint32 = xdr.Uint32(j) + assert.NoError(b, writer.UpsertLedgerEntry(entry)) + } + assert.NoError(b, tx.Commit(uint32(i+1))) + } +} + +func NewTestDB(tb testing.TB) *DB { + tmp := tb.TempDir() + dbPath := path.Join(tmp, "db.sqlite") + db, err := OpenSQLiteDB(dbPath) + if err != nil { + assert.NoError(tb, db.Close()) + } + tb.Cleanup(func() { + assert.NoError(tb, db.Close()) + }) + return &DB{ + SessionInterface: db, + cache: dbCache{ + ledgerEntries: newTransactionalCache(), + }, + } +} diff --git a/cmd/soroban-rpc/internal/db/migrations/01_init.sql b/cmd/soroban-rpc/internal/db/migrations/01_init.sql new file mode 100644 index 
00000000..54c7e1cb --- /dev/null +++ b/cmd/soroban-rpc/internal/db/migrations/01_init.sql @@ -0,0 +1,22 @@ +-- +migrate Up +CREATE TABLE ledger_entries ( + key BLOB NOT NULL PRIMARY KEY, + entry BLOB NOT NULL +); + +-- metadata key-value store +CREATE TABLE metadata ( + key TEXT NOT NULL PRIMARY KEY, + value TEXT NOT NULL +); + +-- table to store all ledgers +CREATE TABLE ledger_close_meta ( + sequence INTEGER NOT NULL PRIMARY KEY, + meta BLOB NOT NULL +); + +-- +migrate Down +drop table ledger_entries cascade; +drop table ledger_entries_meta cascade; +drop table ledger_close_meta cascade; diff --git a/cmd/soroban-rpc/internal/db/transactionalcache.go b/cmd/soroban-rpc/internal/db/transactionalcache.go new file mode 100644 index 00000000..dd14aa41 --- /dev/null +++ b/cmd/soroban-rpc/internal/db/transactionalcache.go @@ -0,0 +1,65 @@ +package db + +type transactionalCache struct { + entries map[string]string +} + +func newTransactionalCache() transactionalCache { + return transactionalCache{entries: map[string]string{}} +} + +func (c transactionalCache) newReadTx() transactionalCacheReadTx { + entries := make(map[string]*string, len(c.entries)) + for k, v := range c.entries { + localV := v + entries[k] = &localV + } + return transactionalCacheReadTx{entries: entries} +} + +func (c transactionalCache) newWriteTx(estimatedWriteCount int) transactionalCacheWriteTx { + return transactionalCacheWriteTx{ + pendingUpdates: make(map[string]*string, estimatedWriteCount), + parent: &c, + } +} + +// nil indicates not present in the underlying storage +type transactionalCacheReadTx struct { + entries map[string]*string +} + +// nil indicates not present in the underlying storage +func (r transactionalCacheReadTx) get(key string) (*string, bool) { + val, ok := r.entries[key] + return val, ok +} + +// nil indicates not present in the underlying storage +func (r transactionalCacheReadTx) upsert(key string, value *string) { + r.entries[key] = value +} + +type transactionalCacheWriteTx struct { + // nil indicates deletion + pendingUpdates map[string]*string + parent *transactionalCache +} + +func (w transactionalCacheWriteTx) upsert(key, val string) { + w.pendingUpdates[key] = &val +} + +func (w transactionalCacheWriteTx) delete(key string) { + w.pendingUpdates[key] = nil +} + +func (w transactionalCacheWriteTx) commit() { + for key, newValue := range w.pendingUpdates { + if newValue == nil { + delete(w.parent.entries, key) + } else { + w.parent.entries[key] = *newValue + } + } +} diff --git a/cmd/soroban-rpc/internal/events/cursor.go b/cmd/soroban-rpc/internal/events/cursor.go new file mode 100644 index 00000000..9f37b513 --- /dev/null +++ b/cmd/soroban-rpc/internal/events/cursor.go @@ -0,0 +1,122 @@ +package events + +import ( + "encoding/json" + "fmt" + "math" + "strconv" + "strings" + + "github.com/stellar/go/toid" +) + +// Cursor represents the position of a Soroban event. +// Soroban events are sorted in ascending order by +// ledger sequence, transaction index, operation index, +// and event index. +type Cursor struct { + // Ledger is the sequence of the ledger which emitted the event. + Ledger uint32 + // Tx is the index of the transaction within the ledger which emitted the event. + Tx uint32 + // Op is the index of the operation within the transaction which emitted the event. + Op uint32 + // Event is the index of the event within in the operation which emitted the event. 
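An aside on the transactional cache introduced just above, before the cursor definition continues: a read transaction is a point-in-time copy of the committed entries, a write transaction buffers updates (with nil marking a deletion) and only applies them on commit. A minimal usage sketch, under the assumption that it lives in the same db package since the types are unexported; cacheSketch is a hypothetical helper and not part of this change set:

package db

// cacheSketch exercises the copy-on-read and buffered-write semantics of
// transactionalCache. Illustrative only.
func cacheSketch() (string, bool) {
	c := newTransactionalCache()

	w := c.newWriteTx(2) // capacity hint for the pending-update map
	w.upsert("key-a", "v1")
	w.delete("key-b") // recorded as a nil pending update
	w.commit()        // applies the buffered updates to the parent cache

	r := c.newReadTx() // snapshot copy of the committed entries
	v, ok := r.get("key-a")
	if !ok || v == nil {
		return "", false
	}
	return *v, true // yields "v1", true
}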
+ Event uint32 +} + +// String returns a string representation of this cursor +func (c Cursor) String() string { + return fmt.Sprintf( + "%019d-%010d", + toid.New(int32(c.Ledger), int32(c.Tx), int32(c.Op)).ToInt64(), + c.Event, + ) +} + +// MarshalJSON marshals the cursor into JSON +func (c Cursor) MarshalJSON() ([]byte, error) { + return json.Marshal(c.String()) +} + +// UnmarshalJSON unmarshalls a cursor from the given JSON +func (c *Cursor) UnmarshalJSON(b []byte) error { + var s string + if err := json.Unmarshal(b, &s); err != nil { + return err + } + + if parsed, err := ParseCursor(s); err != nil { + return err + } else { + *c = parsed + } + return nil +} + +// ParseCursor parses the given string and returns the corresponding cursor +func ParseCursor(input string) (Cursor, error) { + parts := strings.SplitN(input, "-", 2) + if len(parts) != 2 { + return Cursor{}, fmt.Errorf("invalid event id %s", input) + } + + // Parse the first part (toid) + idInt, err := strconv.ParseInt(parts[0], 10, 64) //lint:ignore gomnd + if err != nil { + return Cursor{}, fmt.Errorf("invalid event id %s: %w", input, err) + } + parsed := toid.Parse(idInt) + + // Parse the second part (event order) + eventOrder, err := strconv.ParseUint(parts[1], 10, 32) //lint:ignore gomnd + if err != nil { + return Cursor{}, fmt.Errorf("invalid event id %s: %w", input, err) + } + + return Cursor{ + Ledger: uint32(parsed.LedgerSequence), + Tx: uint32(parsed.TransactionOrder), + Op: uint32(parsed.OperationOrder), + Event: uint32(eventOrder), + }, nil +} + +func cmp(a, b uint32) int { + if a < b { + return -1 + } + if a > b { + return 1 + } + return 0 +} + +// Cmp compares two cursors. +// 0 is returned if the c is equal to other. +// 1 is returned if c is greater than other. +// -1 is returned if c is less than other. 
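To make the encoding and the ordering concrete, here is a small test-style sketch (illustrative only; it assumes it sits in the events package next to cursor.go and uses nothing beyond the String, ParseCursor and Cmp functions defined in this file):

package events

import "testing"

func TestCursorSketch(t *testing.T) {
	a := Cursor{Ledger: 123, Tx: 10, Op: 5, Event: 1}
	b := Cursor{Ledger: 123, Tx: 10, Op: 5, Event: 2}

	// String packs (ledger, tx, op) into a zero-padded TOID, followed by the
	// zero-padded event index, so the fixed-width strings sort in cursor order.
	parsed, err := ParseCursor(a.String())
	if err != nil || parsed != a {
		t.Fatalf("String/ParseCursor round trip failed: %v", err)
	}

	if a.Cmp(b) != -1 || b.Cmp(a) != 1 || a.Cmp(a) != 0 {
		t.Fatal("unexpected cursor ordering")
	}
}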
+func (c Cursor) Cmp(other Cursor) int { + if c.Ledger == other.Ledger { + if c.Tx == other.Tx { + if c.Op == other.Op { + return cmp(c.Event, other.Event) + } + return cmp(c.Op, other.Op) + } + return cmp(c.Tx, other.Tx) + } + return cmp(c.Ledger, other.Ledger) +} + +var ( + // MinCursor is the smallest possible cursor + MinCursor = Cursor{} + // MaxCursor is the largest possible cursor + MaxCursor = Cursor{ + Ledger: math.MaxUint32, + Tx: math.MaxUint32, + Op: math.MaxUint32, + Event: math.MaxUint32, + } +) diff --git a/cmd/soroban-rpc/internal/events/cursor_test.go b/cmd/soroban-rpc/internal/events/cursor_test.go new file mode 100644 index 00000000..6dfe1e58 --- /dev/null +++ b/cmd/soroban-rpc/internal/events/cursor_test.go @@ -0,0 +1,109 @@ +package events + +import ( + "encoding/json" + "math" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseCursor(t *testing.T) { + for _, cursor := range []Cursor{ + { + Ledger: math.MaxInt32, + Tx: 1048575, + Op: 4095, + Event: math.MaxInt32, + }, + { + Ledger: 0, + Tx: 0, + Op: 0, + Event: 0, + }, + { + Ledger: 123, + Tx: 10, + Op: 5, + Event: 1, + }, + } { + parsed, err := ParseCursor(cursor.String()) + assert.NoError(t, err) + assert.Equal(t, cursor, parsed) + } +} + +func TestCursorJSON(t *testing.T) { + type options struct { + Cursor *Cursor `json:"cursor,omitempty"` + Limit uint `json:"limit,omitempty"` + } + for _, testCase := range []options{ + {nil, 100}, + {nil, 0}, + {&Cursor{ + Ledger: 1, + Tx: 2, + Op: 3, + Event: 4, + }, 100}, + } { + result, err := json.Marshal(testCase) + assert.NoError(t, err) + var parsed options + assert.NoError(t, json.Unmarshal(result, &parsed)) + assert.Equal(t, testCase, parsed) + } +} + +func TestCursorCmp(t *testing.T) { + for _, testCase := range []struct { + a Cursor + b Cursor + expected int + }{ + {MinCursor, MaxCursor, -1}, + {MinCursor, MinCursor, 0}, + {MaxCursor, MaxCursor, 0}, + { + Cursor{Ledger: 1, Tx: 2, Op: 3, Event: 4}, + Cursor{Ledger: 1, Tx: 2, Op: 3, Event: 4}, + 0, + }, + { + Cursor{Ledger: 5, Tx: 2, Op: 3, Event: 4}, + Cursor{Ledger: 7, Tx: 2, Op: 3, Event: 4}, + -1, + }, + { + Cursor{Ledger: 5, Tx: 2, Op: 3, Event: 4}, + Cursor{Ledger: 5, Tx: 7, Op: 3, Event: 4}, + -1, + }, + { + Cursor{Ledger: 5, Tx: 2, Op: 3, Event: 4}, + Cursor{Ledger: 5, Tx: 2, Op: 7, Event: 4}, + -1, + }, + { + Cursor{Ledger: 5, Tx: 2, Op: 3, Event: 4}, + Cursor{Ledger: 5, Tx: 2, Op: 3, Event: 7}, + -1, + }, + } { + a := testCase.a + b := testCase.b + expected := testCase.expected + + if got := a.Cmp(b); got != expected { + t.Fatalf("expected (%v).Cmp(%v) to be %v but got %v", a, b, expected, got) + } + a, b = b, a + expected *= -1 + if got := a.Cmp(b); got != expected { + t.Fatalf("expected (%v).Cmp(%v) to be %v but got %v", a, b, expected, got) + } + } +} diff --git a/cmd/soroban-rpc/internal/events/events.go b/cmd/soroban-rpc/internal/events/events.go new file mode 100644 index 00000000..0c5fdc83 --- /dev/null +++ b/cmd/soroban-rpc/internal/events/events.go @@ -0,0 +1,257 @@ +package events + +import ( + "errors" + "io" + "sort" + "sync" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stellar/go/ingest" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/ledgerbucketwindow" +) + +type bucket struct { + ledgerSeq uint32 + ledgerCloseTimestamp int64 + events []event +} + +type event struct { + contents xdr.DiagnosticEvent + txIndex uint32 + 
opIndex uint32 + eventIndex uint32 +} + +func (e event) cursor(ledgerSeq uint32) Cursor { + return Cursor{ + Ledger: ledgerSeq, + Tx: e.txIndex, + Op: e.opIndex, + Event: e.eventIndex, + } +} + +// MemoryStore is an in-memory store of soroban events. +type MemoryStore struct { + // networkPassphrase is an immutable string containing the + // Stellar network passphrase. + // Accessing networkPassphrase does not need to be protected + // by the lock + networkPassphrase string + // lock protects the mutable fields below + lock sync.RWMutex + eventsByLedger *ledgerbucketwindow.LedgerBucketWindow[[]event] + eventsDurationMetric *prometheus.SummaryVec + eventCountMetric prometheus.Summary +} + +// NewMemoryStore creates a new MemoryStore. +// The retention window is in units of ledgers. +// All events occurring in the following ledger range +// [ latestLedger - retentionWindow, latestLedger ] +// will be included in the MemoryStore. If the MemoryStore +// is full, any events from new ledgers will evict +// older entries outside the retention window. +func NewMemoryStore(daemon interfaces.Daemon, networkPassphrase string, retentionWindow uint32) *MemoryStore { + window := ledgerbucketwindow.NewLedgerBucketWindow[[]event](retentionWindow) + + // eventsDurationMetric is a metric for measuring latency of event store operations + eventsDurationMetric := prometheus.NewSummaryVec(prometheus.SummaryOpts{ + Namespace: daemon.MetricsNamespace(), Subsystem: "events", Name: "operation_duration_seconds", + Help: "event store operation durations, sliding window = 10m", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }, + []string{"operation"}, + ) + + eventCountMetric := prometheus.NewSummary(prometheus.SummaryOpts{ + Namespace: daemon.MetricsNamespace(), Subsystem: "events", Name: "count", + Help: "count of events ingested, sliding window = 10m", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }) + daemon.MetricsRegistry().MustRegister(eventCountMetric, eventsDurationMetric) + return &MemoryStore{ + networkPassphrase: networkPassphrase, + eventsByLedger: window, + eventsDurationMetric: eventsDurationMetric, + eventCountMetric: eventCountMetric, + } +} + +// Range defines a [Start, End) interval of Soroban events. +type Range struct { + // Start defines the (inclusive) start of the range. + Start Cursor + // ClampStart indicates whether Start should be clamped up + // to the earliest ledger available if Start is too low. + ClampStart bool + // End defines the (exclusive) end of the range. + End Cursor + // ClampEnd indicates whether End should be clamped down + // to the latest ledger available if End is too high. + ClampEnd bool +} + +// Scan applies f on all the events occurring in the given range. +// The events are processed in sorted ascending Cursor order. +// If f returns false, the scan terminates early (f will not be applied on +// remaining events in the range). Note that a read lock is held for the +// entire duration of the Scan function so f should be written in a way +// to minimize latency. 
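As an illustration of how a caller drives Scan, the sketch below gathers every retained event at or after a given cursor, clamping both ends of the range to whatever the store currently holds (collectFrom is a hypothetical helper, not part of this change set):

package events

import "github.com/stellar/go/xdr"

// collectFrom is illustrative only: it collects all retained events at or
// after start, clamping the range to the store's current retention window.
func collectFrom(store *MemoryStore, start Cursor) ([]xdr.DiagnosticEvent, uint32, error) {
	var collected []xdr.DiagnosticEvent
	latestLedger, err := store.Scan(Range{
		Start:      start,
		ClampStart: true, // move start up to the oldest retained ledger if needed
		End:        MaxCursor,
		ClampEnd:   true, // move end down to just past the latest retained ledger
	}, func(e xdr.DiagnosticEvent, cur Cursor, closeTime int64) bool {
		collected = append(collected, e)
		return true // returning false would stop the scan early
	})
	return collected, latestLedger, err
}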
+func (m *MemoryStore) Scan(eventRange Range, f func(xdr.DiagnosticEvent, Cursor, int64) bool) (uint32, error) { + startTime := time.Now() + m.lock.RLock() + defer m.lock.RUnlock() + + if err := m.validateRange(&eventRange); err != nil { + return 0, err + } + + firstLedgerInRange := eventRange.Start.Ledger + firstLedgerInWindow := m.eventsByLedger.Get(0).LedgerSeq + lastLedgerInWindow := firstLedgerInWindow + (m.eventsByLedger.Len() - 1) + for i := firstLedgerInRange - firstLedgerInWindow; i < m.eventsByLedger.Len(); i++ { + bucket := m.eventsByLedger.Get(i) + events := bucket.BucketContent + if bucket.LedgerSeq == firstLedgerInRange { + // we need to seek for the beginning of the events in the first bucket in the range + events = seek(events, eventRange.Start) + } + timestamp := bucket.LedgerCloseTimestamp + for _, event := range events { + cur := event.cursor(bucket.LedgerSeq) + if eventRange.End.Cmp(cur) <= 0 { + return lastLedgerInWindow, nil + } + if !f(event.contents, cur, timestamp) { + return lastLedgerInWindow, nil + } + } + } + m.eventsDurationMetric.With(prometheus.Labels{"operation": "scan"}). + Observe(time.Since(startTime).Seconds()) + return lastLedgerInWindow, nil +} + +// validateRange checks if the range falls within the bounds +// of the events in the memory store. +// validateRange should be called with the read lock. +func (m *MemoryStore) validateRange(eventRange *Range) error { + if m.eventsByLedger.Len() == 0 { + return errors.New("event store is empty") + } + firstBucket := m.eventsByLedger.Get(0) + min := Cursor{Ledger: firstBucket.LedgerSeq} + if eventRange.Start.Cmp(min) < 0 { + if eventRange.ClampStart { + eventRange.Start = min + } else { + return errors.New("start is before oldest ledger") + } + } + max := Cursor{Ledger: min.Ledger + m.eventsByLedger.Len()} + if eventRange.Start.Cmp(max) >= 0 { + return errors.New("start is after newest ledger") + } + if eventRange.End.Cmp(max) > 0 { + if eventRange.ClampEnd { + eventRange.End = max + } else { + return errors.New("end is after latest ledger") + } + } + + if eventRange.Start.Cmp(eventRange.End) >= 0 { + return errors.New("start is not before end") + } + + return nil +} + +// seek returns the subset of all events which occur +// at a point greater than or equal to the given cursor. +// events must be sorted in ascending order. +func seek(events []event, cursor Cursor) []event { + j := sort.Search(len(events), func(i int) bool { + return cursor.Cmp(events[i].cursor(cursor.Ledger)) <= 0 + }) + return events[j:] +} + +// IngestEvents adds new events from the given ledger into the store. +// As a side effect, events which fall outside the retention window are +// removed from the store. +func (m *MemoryStore) IngestEvents(ledgerCloseMeta xdr.LedgerCloseMeta) error { + startTime := time.Now() + // no need to acquire the lock because the networkPassphrase field + // is immutable + events, err := readEvents(m.networkPassphrase, ledgerCloseMeta) + if err != nil { + return err + } + bucket := ledgerbucketwindow.LedgerBucket[[]event]{ + LedgerSeq: ledgerCloseMeta.LedgerSequence(), + LedgerCloseTimestamp: int64(ledgerCloseMeta.LedgerHeaderHistoryEntry().Header.ScpValue.CloseTime), + BucketContent: events, + } + m.lock.Lock() + m.eventsByLedger.Append(bucket) + m.lock.Unlock() + m.eventsDurationMetric.With(prometheus.Labels{"operation": "ingest"}). 
+ Observe(time.Since(startTime).Seconds()) + m.eventCountMetric.Observe(float64(len(events))) + return nil +} + +func readEvents(networkPassphrase string, ledgerCloseMeta xdr.LedgerCloseMeta) (events []event, err error) { + var txReader *ingest.LedgerTransactionReader + txReader, err = ingest.NewLedgerTransactionReaderFromLedgerCloseMeta(networkPassphrase, ledgerCloseMeta) + if err != nil { + return + } + defer func() { + closeErr := txReader.Close() + if err == nil { + err = closeErr + } + }() + + for { + var tx ingest.LedgerTransaction + tx, err = txReader.Read() + if err == io.EOF { + err = nil + break + } + if err != nil { + return + } + + if !tx.Result.Successful() { + continue + } + txEvents, err := tx.GetDiagnosticEvents() + if err != nil { + return nil, err + } + for index, e := range txEvents { + events = append(events, event{ + contents: e, + txIndex: tx.Index, + // NOTE: we cannot really index by operation since all events + // are provided as part of the transaction. However, + // that shouldn't matter in practice since a transaction + // can only contain a single Host Function Invocation. + opIndex: 0, + eventIndex: uint32(index), + }) + } + } + return events, err +} diff --git a/cmd/soroban-rpc/internal/events/events_test.go b/cmd/soroban-rpc/internal/events/events_test.go new file mode 100644 index 00000000..9f6a3fe0 --- /dev/null +++ b/cmd/soroban-rpc/internal/events/events_test.go @@ -0,0 +1,392 @@ +package events + +import ( + "testing" + + "github.com/stellar/go/xdr" + "github.com/stretchr/testify/require" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/ledgerbucketwindow" +) + +var ( + ledger5CloseTime = ledgerCloseTime(5) + ledger5Events = []event{ + newEvent(1, 0, 0, 100), + newEvent(1, 0, 1, 200), + newEvent(2, 0, 0, 300), + newEvent(2, 1, 0, 400), + } + ledger6CloseTime = ledgerCloseTime(6) + ledger6Events []event = nil + ledger7CloseTime = ledgerCloseTime(7) + ledger7Events = []event{ + newEvent(1, 0, 0, 500), + } + ledger8CloseTime = ledgerCloseTime(8) + ledger8Events = []event{ + newEvent(1, 0, 0, 600), + newEvent(2, 0, 0, 700), + newEvent(2, 0, 1, 800), + newEvent(2, 0, 2, 900), + newEvent(2, 1, 0, 1000), + } +) + +func ledgerCloseTime(seq uint32) int64 { + return int64(seq)*25 + 100 +} + +func newEvent(txIndex, opIndex, eventIndex, val uint32) event { + v := xdr.Uint32(val) + return event{ + contents: xdr.DiagnosticEvent{ + InSuccessfulContractCall: true, + Event: xdr.ContractEvent{ + Type: xdr.ContractEventTypeSystem, + Body: xdr.ContractEventBody{ + V: 0, + V0: &xdr.ContractEventV0{ + Data: xdr.ScVal{ + Type: xdr.ScValTypeScvU32, + U32: &v, + }, + }, + }, + }, + }, + txIndex: txIndex, + opIndex: opIndex, + eventIndex: eventIndex, + } +} + +func mustMarshal(e xdr.DiagnosticEvent) string { + result, err := xdr.MarshalBase64(e) + if err != nil { + panic(err) + } + return result +} + +func (e event) equals(other event) bool { + return e.txIndex == other.txIndex && + e.opIndex == other.opIndex && + e.eventIndex == other.eventIndex && + mustMarshal(e.contents) == mustMarshal(other.contents) +} + +func eventsAreEqual(t *testing.T, a, b []event) { + require.Equal(t, len(a), len(b)) + for i := range a { + require.True(t, a[i].equals(b[i])) + } +} + +func TestScanRangeValidation(t *testing.T) { + m := NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 4) + assertNoCalls := func(contractEvent xdr.DiagnosticEvent, cursor Cursor, timestamp int64) bool { + 
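// Every range fed to Scan in this test is either against an empty store or
// otherwise invalid, so this callback must never run; reaching it fails the test.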
t.Fatalf("unexpected call") + return true + } + _, err := m.Scan(Range{ + Start: MinCursor, + ClampStart: true, + End: MaxCursor, + ClampEnd: true, + }, assertNoCalls) + require.EqualError(t, err, "event store is empty") + + m = createStore(t) + + for _, testCase := range []struct { + input Range + err string + }{ + { + Range{ + Start: MinCursor, + ClampStart: false, + End: MaxCursor, + ClampEnd: true, + }, + "start is before oldest ledger", + }, + { + Range{ + Start: Cursor{Ledger: 4}, + ClampStart: false, + End: MaxCursor, + ClampEnd: true, + }, + "start is before oldest ledger", + }, + { + Range{ + Start: MinCursor, + ClampStart: true, + End: MaxCursor, + ClampEnd: false, + }, + "end is after latest ledger", + }, + { + Range{ + Start: Cursor{Ledger: 5}, + ClampStart: true, + End: Cursor{Ledger: 10}, + ClampEnd: false, + }, + "end is after latest ledger", + }, + { + Range{ + Start: Cursor{Ledger: 10}, + ClampStart: true, + End: Cursor{Ledger: 3}, + ClampEnd: true, + }, + "start is after newest ledger", + }, + { + Range{ + Start: Cursor{Ledger: 10}, + ClampStart: false, + End: Cursor{Ledger: 3}, + ClampEnd: false, + }, + "start is after newest ledger", + }, + { + Range{ + Start: Cursor{Ledger: 9}, + ClampStart: false, + End: Cursor{Ledger: 10}, + ClampEnd: true, + }, + "start is after newest ledger", + }, + { + Range{ + Start: Cursor{Ledger: 9}, + ClampStart: false, + End: Cursor{Ledger: 10}, + ClampEnd: false, + }, + "start is after newest ledger", + }, + { + Range{ + Start: Cursor{Ledger: 2}, + ClampStart: true, + End: Cursor{Ledger: 3}, + ClampEnd: false, + }, + "start is not before end", + }, + { + Range{ + Start: Cursor{Ledger: 2}, + ClampStart: false, + End: Cursor{Ledger: 3}, + ClampEnd: false, + }, + "start is before oldest ledger", + }, + { + Range{ + Start: Cursor{Ledger: 6}, + ClampStart: false, + End: Cursor{Ledger: 6}, + ClampEnd: false, + }, + "start is not before end", + }, + } { + _, err := m.Scan(testCase.input, assertNoCalls) + require.EqualError(t, err, testCase.err, testCase.input) + } +} + +func createStore(t *testing.T) *MemoryStore { + m := NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 4) + m.eventsByLedger.Append(ledgerbucketwindow.LedgerBucket[[]event]{ + LedgerSeq: 5, + LedgerCloseTimestamp: ledger5CloseTime, + BucketContent: ledger5Events, + }) + m.eventsByLedger.Append(ledgerbucketwindow.LedgerBucket[[]event]{ + LedgerSeq: 6, + LedgerCloseTimestamp: ledger6CloseTime, + BucketContent: nil, + }) + m.eventsByLedger.Append(ledgerbucketwindow.LedgerBucket[[]event]{ + LedgerSeq: 7, + LedgerCloseTimestamp: ledger7CloseTime, + BucketContent: ledger7Events, + }) + m.eventsByLedger.Append(ledgerbucketwindow.LedgerBucket[[]event]{ + LedgerSeq: 8, + LedgerCloseTimestamp: ledger8CloseTime, + BucketContent: ledger8Events, + }) + + return m +} + +func concat(slices ...[]event) []event { + var result []event + for _, slice := range slices { + result = append(result, slice...) 
+ } + return result +} + +func TestScan(t *testing.T) { + m := createStore(t) + + genEquivalentInputs := func(input Range) []Range { + results := []Range{input} + if !input.ClampStart { + rangeCopy := input + rangeCopy.ClampStart = true + results = append(results, rangeCopy) + } + if !input.ClampEnd { + rangeCopy := input + rangeCopy.ClampEnd = true + results = append(results, rangeCopy) + } + if !input.ClampStart && !input.ClampEnd { + rangeCopy := input + rangeCopy.ClampStart = true + rangeCopy.ClampEnd = true + results = append(results, rangeCopy) + } + return results + } + + for _, testCase := range []struct { + input Range + expected []event + }{ + { + Range{ + Start: MinCursor, + ClampStart: true, + End: MaxCursor, + ClampEnd: true, + }, + concat(ledger5Events, ledger6Events, ledger7Events, ledger8Events), + }, + { + Range{ + Start: Cursor{Ledger: 5}, + ClampStart: false, + End: Cursor{Ledger: 9}, + ClampEnd: false, + }, + concat(ledger5Events, ledger6Events, ledger7Events, ledger8Events), + }, + { + Range{ + Start: Cursor{Ledger: 5, Tx: 1, Op: 2}, + ClampStart: false, + End: Cursor{Ledger: 9}, + ClampEnd: false, + }, + concat(ledger5Events[2:], ledger6Events, ledger7Events, ledger8Events), + }, + { + Range{ + Start: Cursor{Ledger: 5, Tx: 3}, + ClampStart: false, + End: MaxCursor, + ClampEnd: true, + }, + concat(ledger6Events, ledger7Events, ledger8Events), + }, + { + Range{ + Start: Cursor{Ledger: 6}, + ClampStart: false, + End: MaxCursor, + ClampEnd: true, + }, + concat(ledger7Events, ledger8Events), + }, + { + Range{ + Start: Cursor{Ledger: 6, Tx: 1}, + ClampStart: false, + End: MaxCursor, + ClampEnd: true, + }, + concat(ledger7Events, ledger8Events), + }, + { + Range{ + Start: Cursor{Ledger: 8, Tx: 2, Op: 1, Event: 0}, + ClampStart: false, + End: MaxCursor, + ClampEnd: true, + }, + ledger8Events[len(ledger8Events)-1:], + }, + { + Range{ + Start: Cursor{Ledger: 8, Tx: 2, Op: 1, Event: 0}, + ClampStart: false, + End: Cursor{Ledger: 9}, + ClampEnd: false, + }, + ledger8Events[len(ledger8Events)-1:], + }, + { + Range{ + Start: Cursor{Ledger: 5}, + ClampStart: false, + End: Cursor{Ledger: 7}, + ClampEnd: false, + }, + concat(ledger5Events, ledger6Events), + }, + { + Range{ + Start: Cursor{Ledger: 5, Tx: 1, Op: 2}, + ClampStart: false, + End: Cursor{Ledger: 8, Tx: 1, Op: 4}, + ClampEnd: false, + }, + concat(ledger5Events[2:], ledger6Events, ledger7Events, ledger8Events[:1]), + }, + } { + for _, input := range genEquivalentInputs(testCase.input) { + var events []event + iterateAll := true + f := func(contractEvent xdr.DiagnosticEvent, cursor Cursor, ledgerCloseTimestamp int64) bool { + require.Equal(t, ledgerCloseTime(cursor.Ledger), ledgerCloseTimestamp) + events = append(events, event{ + contents: contractEvent, + txIndex: cursor.Tx, + opIndex: cursor.Op, + eventIndex: cursor.Event, + }) + return iterateAll + } + latest, err := m.Scan(input, f) + require.NoError(t, err) + require.Equal(t, uint32(8), latest) + eventsAreEqual(t, testCase.expected, events) + if len(events) > 0 { + events = nil + iterateAll = false + latest, err := m.Scan(input, f) + require.NoError(t, err) + require.Equal(t, uint32(8), latest) + eventsAreEqual(t, []event{testCase.expected[0]}, events) + } + } + } +} diff --git a/cmd/soroban-rpc/internal/ingest/ledgerentry.go b/cmd/soroban-rpc/internal/ingest/ledgerentry.go new file mode 100644 index 00000000..0b8fc48f --- /dev/null +++ b/cmd/soroban-rpc/internal/ingest/ledgerentry.go @@ -0,0 +1,87 @@ +package ingest + +import ( + "context" + "io" + "strings" + "time" + + 
"github.com/prometheus/client_golang/prometheus" + "github.com/stellar/go/ingest" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" +) + +func (s *Service) ingestLedgerEntryChanges(ctx context.Context, reader ingest.ChangeReader, tx db.WriteTx, progressLogPeriod int) error { + entryCount := 0 + startTime := time.Now() + writer := tx.LedgerEntryWriter() + + changeStatsProcessor := ingest.StatsChangeProcessor{} + for ctx.Err() == nil { + if change, err := reader.Read(); err == io.EOF { + return nil + } else if err != nil { + return err + } else if err = ingestLedgerEntryChange(writer, change); err != nil { + return err + } else if err = changeStatsProcessor.ProcessChange(ctx, change); err != nil { + return err + } + entryCount++ + if progressLogPeriod > 0 && entryCount%progressLogPeriod == 0 { + s.logger.Infof("processed %d ledger entry changes", entryCount) + } + } + + results := changeStatsProcessor.GetResults() + for stat, value := range results.Map() { + stat = strings.Replace(stat, "stats_", "change_", 1) + s.ledgerStatsMetric. + With(prometheus.Labels{"type": stat}).Add(float64(value.(int64))) + } + s.ingestionDurationMetric. + With(prometheus.Labels{"type": "ledger_entries"}).Observe(time.Since(startTime).Seconds()) + return ctx.Err() +} + +func (s *Service) ingestTempLedgerEntryEvictions( + ctx context.Context, + evictedTempLedgerKeys []xdr.LedgerKey, + tx db.WriteTx, +) error { + startTime := time.Now() + writer := tx.LedgerEntryWriter() + counts := map[string]int{} + + for _, key := range evictedTempLedgerKeys { + if err := writer.DeleteLedgerEntry(key); err != nil { + return err + } + counts["evicted_"+key.Type.String()]++ + if ctx.Err() != nil { + return ctx.Err() + } + } + + for evictionType, count := range counts { + s.ledgerStatsMetric. + With(prometheus.Labels{"type": evictionType}).Add(float64(count)) + } + s.ingestionDurationMetric. 
+ With(prometheus.Labels{"type": "evicted_temp_ledger_entries"}).Observe(time.Since(startTime).Seconds()) + return ctx.Err() +} + +func ingestLedgerEntryChange(writer db.LedgerEntryWriter, change ingest.Change) error { + if change.Post == nil { + ledgerKey, err := xdr.GetLedgerKeyFromData(change.Pre.Data) + if err != nil { + return err + } + return writer.DeleteLedgerEntry(ledgerKey) + } else { + return writer.UpsertLedgerEntry(*change.Post) + } +} diff --git a/cmd/soroban-rpc/internal/ingest/mock_db_test.go b/cmd/soroban-rpc/internal/ingest/mock_db_test.go new file mode 100644 index 00000000..221bdc70 --- /dev/null +++ b/cmd/soroban-rpc/internal/ingest/mock_db_test.go @@ -0,0 +1,78 @@ +package ingest + +import ( + "context" + + "github.com/stellar/go/xdr" + "github.com/stretchr/testify/mock" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" +) + +var ( + _ db.ReadWriter = (*MockDB)(nil) + _ db.WriteTx = (*MockTx)(nil) + _ db.LedgerEntryWriter = (*MockLedgerEntryWriter)(nil) + _ db.LedgerWriter = (*MockLedgerWriter)(nil) +) + +type MockDB struct { + mock.Mock +} + +func (m MockDB) NewTx(ctx context.Context) (db.WriteTx, error) { + args := m.Called(ctx) + return args.Get(0).(db.WriteTx), args.Error(1) +} + +func (m MockDB) GetLatestLedgerSequence(ctx context.Context) (uint32, error) { + args := m.Called(ctx) + return args.Get(0).(uint32), args.Error(1) +} + +type MockTx struct { + mock.Mock +} + +func (m MockTx) LedgerEntryWriter() db.LedgerEntryWriter { + args := m.Called() + return args.Get(0).(db.LedgerEntryWriter) +} + +func (m MockTx) LedgerWriter() db.LedgerWriter { + args := m.Called() + return args.Get(0).(db.LedgerWriter) +} + +func (m MockTx) Commit(ledgerSeq uint32) error { + args := m.Called(ledgerSeq) + return args.Error(0) +} + +func (m MockTx) Rollback() error { + args := m.Called() + return args.Error(0) +} + +type MockLedgerEntryWriter struct { + mock.Mock +} + +func (m MockLedgerEntryWriter) UpsertLedgerEntry(entry xdr.LedgerEntry) error { + args := m.Called(entry) + return args.Error(0) +} + +func (m MockLedgerEntryWriter) DeleteLedgerEntry(key xdr.LedgerKey) error { + args := m.Called(key) + return args.Error(0) +} + +type MockLedgerWriter struct { + mock.Mock +} + +func (m MockLedgerWriter) InsertLedger(ledger xdr.LedgerCloseMeta) error { + args := m.Called(ledger) + return args.Error(0) +} diff --git a/cmd/soroban-rpc/internal/ingest/service.go b/cmd/soroban-rpc/internal/ingest/service.go new file mode 100644 index 00000000..6240f7cb --- /dev/null +++ b/cmd/soroban-rpc/internal/ingest/service.go @@ -0,0 +1,311 @@ +package ingest + +import ( + "context" + "errors" + "fmt" + "sync" + "time" + + "github.com/cenkalti/backoff/v4" + "github.com/prometheus/client_golang/prometheus" + "github.com/stellar/go/historyarchive" + "github.com/stellar/go/ingest" + backends "github.com/stellar/go/ingest/ledgerbackend" + supportdb "github.com/stellar/go/support/db" + "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/util" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/events" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/transactions" +) + +const ( + ledgerEntryBaselineProgressLogPeriod = 10000 +) + +var errEmptyArchives = fmt.Errorf("cannot start ingestion without history archives, wait until first history archives 
are published") + +type Config struct { + Logger *log.Entry + DB db.ReadWriter + EventStore *events.MemoryStore + TransactionStore *transactions.MemoryStore + NetworkPassPhrase string + Archive historyarchive.ArchiveInterface + LedgerBackend backends.LedgerBackend + Timeout time.Duration + OnIngestionRetry backoff.Notify + Daemon interfaces.Daemon +} + +func NewService(cfg Config) *Service { + service := newService(cfg) + startService(service, cfg) + return service +} + +func newService(cfg Config) *Service { + // ingestionDurationMetric is a metric for measuring the latency of ingestion + ingestionDurationMetric := prometheus.NewSummaryVec(prometheus.SummaryOpts{ + Namespace: cfg.Daemon.MetricsNamespace(), Subsystem: "ingest", Name: "ledger_ingestion_duration_seconds", + Help: "ledger ingestion durations, sliding window = 10m", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }, + []string{"type"}, + ) + // latestLedgerMetric is a metric for measuring the latest ingested ledger + latestLedgerMetric := prometheus.NewGauge(prometheus.GaugeOpts{ + Namespace: cfg.Daemon.MetricsNamespace(), Subsystem: "ingest", Name: "local_latest_ledger", + Help: "sequence number of the latest ledger ingested by this ingesting instance", + }) + + // ledgerStatsMetric is a metric which measures statistics on all ledger entries ingested by soroban rpc + ledgerStatsMetric := prometheus.NewCounterVec( + prometheus.CounterOpts{ + Namespace: cfg.Daemon.MetricsNamespace(), Subsystem: "ingest", Name: "ledger_stats_total", + Help: "counters of different ledger stats", + }, + []string{"type"}, + ) + + cfg.Daemon.MetricsRegistry().MustRegister(ingestionDurationMetric, latestLedgerMetric, ledgerStatsMetric) + + service := &Service{ + logger: cfg.Logger, + db: cfg.DB, + eventStore: cfg.EventStore, + transactionStore: cfg.TransactionStore, + ledgerBackend: cfg.LedgerBackend, + networkPassPhrase: cfg.NetworkPassPhrase, + timeout: cfg.Timeout, + ingestionDurationMetric: ingestionDurationMetric, + latestLedgerMetric: latestLedgerMetric, + ledgerStatsMetric: ledgerStatsMetric, + } + + return service +} + +func startService(service *Service, cfg Config) { + ctx, done := context.WithCancel(context.Background()) + service.done = done + service.wg.Add(1) + panicGroup := util.UnrecoverablePanicGroup.Log(cfg.Logger) + panicGroup.Go(func() { + defer service.wg.Done() + // Retry running ingestion every second for 5 seconds. + constantBackoff := backoff.WithMaxRetries(backoff.NewConstantBackOff(1*time.Second), 5) + // Don't want to keep retrying if the context gets canceled. 
+ contextBackoff := backoff.WithContext(constantBackoff, ctx) + err := backoff.RetryNotify( + func() error { + err := service.run(ctx, cfg.Archive) + if errors.Is(err, errEmptyArchives) { + // keep retrying until history archives are published + constantBackoff.Reset() + } + return err + }, + contextBackoff, + cfg.OnIngestionRetry) + if err != nil && !errors.Is(err, context.Canceled) { + service.logger.WithError(err).Fatal("could not run ingestion") + } + }) +} + +type Service struct { + logger *log.Entry + db db.ReadWriter + eventStore *events.MemoryStore + transactionStore *transactions.MemoryStore + ledgerBackend backends.LedgerBackend + timeout time.Duration + networkPassPhrase string + done context.CancelFunc + wg sync.WaitGroup + ingestionDurationMetric *prometheus.SummaryVec + latestLedgerMetric prometheus.Gauge + ledgerStatsMetric *prometheus.CounterVec +} + +func (s *Service) Close() error { + s.done() + s.wg.Wait() + return nil +} + +func (s *Service) run(ctx context.Context, archive historyarchive.ArchiveInterface) error { + // Create a ledger-entry baseline from a checkpoint if it wasn't done before + // (after that we will be adding deltas from txmeta ledger entry changes) + nextLedgerSeq, checkPointFillErr, err := s.maybeFillEntriesFromCheckpoint(ctx, archive) + if err != nil { + return err + } + + prepareRangeCtx, cancelPrepareRange := context.WithTimeout(ctx, s.timeout) + if err := s.ledgerBackend.PrepareRange(prepareRangeCtx, backends.UnboundedRange(nextLedgerSeq)); err != nil { + cancelPrepareRange() + return err + } + cancelPrepareRange() + + // Make sure that the checkpoint prefill (if any), happened before starting to apply deltas + if err := <-checkPointFillErr; err != nil { + return err + } + + for ; ; nextLedgerSeq++ { + if err := s.ingest(ctx, nextLedgerSeq); err != nil { + return err + } + } +} + +func (s *Service) maybeFillEntriesFromCheckpoint(ctx context.Context, archive historyarchive.ArchiveInterface) (uint32, chan error, error) { + checkPointFillErr := make(chan error, 1) + // Skip creating a ledger-entry baseline if the DB was initialized + curLedgerSeq, err := s.db.GetLatestLedgerSequence(ctx) + if err == db.ErrEmptyDB { + var checkpointLedger uint32 + if root, rootErr := archive.GetRootHAS(); rootErr != nil { + return 0, checkPointFillErr, rootErr + } else if root.CurrentLedger == 0 { + return 0, checkPointFillErr, errEmptyArchives + } else { + checkpointLedger = root.CurrentLedger + } + + // DB is empty, let's fill it from the History Archive, using the latest available checkpoint + // Do it in parallel with the upcoming captive core preparation to save time + s.logger.Infof("found an empty database, creating ledger-entry baseline from the most recent checkpoint (%d). 
This can take up to 30 minutes, depending on the network", checkpointLedger) + panicGroup := util.UnrecoverablePanicGroup.Log(s.logger) + panicGroup.Go(func() { + checkPointFillErr <- s.fillEntriesFromCheckpoint(ctx, archive, checkpointLedger) + }) + return checkpointLedger + 1, checkPointFillErr, nil + } else if err != nil { + return 0, checkPointFillErr, err + } else { + checkPointFillErr <- nil + return curLedgerSeq + 1, checkPointFillErr, nil + } +} + +func (s *Service) fillEntriesFromCheckpoint(ctx context.Context, archive historyarchive.ArchiveInterface, checkpointLedger uint32) error { + checkpointCtx, cancelCheckpointCtx := context.WithTimeout(ctx, s.timeout) + defer cancelCheckpointCtx() + + reader, err := ingest.NewCheckpointChangeReader(checkpointCtx, archive, checkpointLedger) + if err != nil { + return err + } + + tx, err := s.db.NewTx(ctx) + if err != nil { + return err + } + transactionCommitted := false + defer func() { + if !transactionCommitted { + // Internally, we might already have rolled back the transaction. We should + // not generate benign error/warning here in case the transaction was already rolled back. + if rollbackErr := tx.Rollback(); rollbackErr != nil && rollbackErr != supportdb.ErrAlreadyRolledback { + s.logger.WithError(rollbackErr).Warn("could not rollback fillEntriesFromCheckpoint write transactions") + } + } + }() + + if err := s.ingestLedgerEntryChanges(ctx, reader, tx, ledgerEntryBaselineProgressLogPeriod); err != nil { + return err + } + if err := reader.Close(); err != nil { + return err + } + + s.logger.Info("committing checkpoint ledger entries") + err = tx.Commit(checkpointLedger) + transactionCommitted = true + if err != nil { + return err + } + + s.logger.Info("finished checkpoint processing") + return nil +} + +func (s *Service) ingest(ctx context.Context, sequence uint32) error { + startTime := time.Now() + s.logger.Infof("Ingesting ledger %d", sequence) + ledgerCloseMeta, err := s.ledgerBackend.GetLedger(ctx, sequence) + if err != nil { + return err + } + reader, err := ingest.NewLedgerChangeReaderFromLedgerCloseMeta(s.networkPassPhrase, ledgerCloseMeta) + if err != nil { + return err + } + tx, err := s.db.NewTx(ctx) + if err != nil { + return err + } + defer func() { + if err := tx.Rollback(); err != nil { + s.logger.WithError(err).Warn("could not rollback ingest write transactions") + } + }() + + if err := s.ingestLedgerEntryChanges(ctx, reader, tx, 0); err != nil { + return err + } + if err := reader.Close(); err != nil { + return err + } + + // EvictedTemporaryLedgerKeys will include both temporary ledger keys which + // have been evicted and their corresponding ttl ledger entries + evictedTempLedgerKeys, err := ledgerCloseMeta.EvictedTemporaryLedgerKeys() + if err != nil { + return err + } + if err := s.ingestTempLedgerEntryEvictions(ctx, evictedTempLedgerKeys, tx); err != nil { + return err + } + + if err := s.ingestLedgerCloseMeta(tx, ledgerCloseMeta); err != nil { + return err + } + + if err := tx.Commit(sequence); err != nil { + return err + } + s.logger.Debugf("Ingested ledger %d", sequence) + + s.ingestionDurationMetric. + With(prometheus.Labels{"type": "total"}).Observe(time.Since(startTime).Seconds()) + s.latestLedgerMetric.Set(float64(sequence)) + return nil +} + +func (s *Service) ingestLedgerCloseMeta(tx db.WriteTx, ledgerCloseMeta xdr.LedgerCloseMeta) error { + startTime := time.Now() + if err := tx.LedgerWriter().InsertLedger(ledgerCloseMeta); err != nil { + return err + } + s.ingestionDurationMetric. 
+ With(prometheus.Labels{"type": "ledger_close_meta"}).Observe(time.Since(startTime).Seconds()) + + if err := s.eventStore.IngestEvents(ledgerCloseMeta); err != nil { + return err + } + + if err := s.transactionStore.IngestTransactions(ledgerCloseMeta); err != nil { + return err + } + return nil +} diff --git a/cmd/soroban-rpc/internal/ingest/service_test.go b/cmd/soroban-rpc/internal/ingest/service_test.go new file mode 100644 index 00000000..c2e4def0 --- /dev/null +++ b/cmd/soroban-rpc/internal/ingest/service_test.go @@ -0,0 +1,263 @@ +package ingest + +import ( + "context" + "encoding/hex" + "errors" + "sync" + "testing" + "time" + + "github.com/stellar/go/ingest/ledgerbackend" + "github.com/stellar/go/network" + supportlog "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + "github.com/stretchr/testify/assert" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/events" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/transactions" +) + +type ErrorReadWriter struct { +} + +func (rw *ErrorReadWriter) GetLatestLedgerSequence(ctx context.Context) (uint32, error) { + return 0, errors.New("could not get latest ledger sequence") +} +func (rw *ErrorReadWriter) NewTx(ctx context.Context) (db.WriteTx, error) { + return nil, errors.New("could not create new tx") +} + +func TestRetryRunningIngestion(t *testing.T) { + + var retryWg sync.WaitGroup + retryWg.Add(1) + + numRetries := 0 + var lastErr error + incrementRetry := func(err error, dur time.Duration) { + defer retryWg.Done() + numRetries++ + lastErr = err + } + config := Config{ + Logger: supportlog.New(), + DB: &ErrorReadWriter{}, + EventStore: nil, + TransactionStore: nil, + NetworkPassPhrase: "", + Archive: nil, + LedgerBackend: nil, + Timeout: time.Second, + OnIngestionRetry: incrementRetry, + Daemon: interfaces.MakeNoOpDeamon(), + } + service := NewService(config) + retryWg.Wait() + service.Close() + assert.Equal(t, 1, numRetries) + assert.Error(t, lastErr) + assert.ErrorContains(t, lastErr, "could not get latest ledger sequence") +} + +func TestIngestion(t *testing.T) { + mockDB := &MockDB{} + mockLedgerBackend := &ledgerbackend.MockDatabaseBackend{} + daemon := interfaces.MakeNoOpDeamon() + config := Config{ + Logger: supportlog.New(), + DB: mockDB, + EventStore: events.NewMemoryStore(daemon, network.TestNetworkPassphrase, 1), + TransactionStore: transactions.NewMemoryStore(daemon, network.TestNetworkPassphrase, 1), + LedgerBackend: mockLedgerBackend, + Daemon: daemon, + NetworkPassPhrase: network.TestNetworkPassphrase, + } + sequence := uint32(3) + service := newService(config) + mockTx := &MockTx{} + mockLedgerEntryWriter := &MockLedgerEntryWriter{} + mockLedgerWriter := &MockLedgerWriter{} + ctx := context.Background() + mockDB.On("NewTx", ctx).Return(mockTx, nil).Once() + mockTx.On("Commit", sequence).Return(nil).Once() + mockTx.On("Rollback").Return(nil).Once() + mockTx.On("LedgerEntryWriter").Return(mockLedgerEntryWriter).Twice() + mockTx.On("LedgerWriter").Return(mockLedgerWriter).Once() + + src := xdr.MustAddress("GBXGQJWVLWOYHFLVTKWV5FGHA3LNYY2JQKM7OAJAUEQFU6LPCSEFVXON") + firstTx := xdr.TransactionEnvelope{ + Type: xdr.EnvelopeTypeEnvelopeTypeTx, + V1: &xdr.TransactionV1Envelope{ + Tx: xdr.Transaction{ + Fee: 1, + SourceAccount: src.ToMuxedAccount(), + }, + }, + } + firstTxHash, err := network.HashTransactionInEnvelope(firstTx, 
network.TestNetworkPassphrase) + assert.NoError(t, err) + + baseFee := xdr.Int64(100) + tempKey := xdr.ScSymbol("TEMPKEY") + persistentKey := xdr.ScSymbol("TEMPVAL") + contractIDBytes, err := hex.DecodeString("df06d62447fd25da07c0135eed7557e5a5497ee7d15b7fe345bd47e191d8f577") + assert.NoError(t, err) + var contractID xdr.Hash + copy(contractID[:], contractIDBytes) + contractAddress := xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractID, + } + xdrTrue := true + operationChanges := xdr.LedgerEntryChanges{ + { + Type: xdr.LedgerEntryChangeTypeLedgerEntryState, + State: &xdr.LedgerEntry{ + LastModifiedLedgerSeq: 1, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.ContractDataEntry{ + Contract: contractAddress, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &persistentKey, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvBool, + B: &xdrTrue, + }, + }, + }, + }, + }, + { + Type: xdr.LedgerEntryChangeTypeLedgerEntryUpdated, + Updated: &xdr.LedgerEntry{ + LastModifiedLedgerSeq: 1, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractID, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &persistentKey, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvBool, + B: &xdrTrue, + }, + }, + }, + }, + }, + } + evictedPersistentLedgerEntry := xdr.LedgerEntry{ + LastModifiedLedgerSeq: 123, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.ContractDataEntry{ + Contract: contractAddress, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &persistentKey, + }, + Durability: xdr.ContractDataDurabilityTemporary, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvBool, + B: &xdrTrue, + }, + }, + }, + } + evictedTempLedgerKey := xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.LedgerKeyContractData{ + Contract: contractAddress, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &tempKey, + }, + Durability: xdr.ContractDataDurabilityTemporary, + }, + } + ledger := xdr.LedgerCloseMeta{ + V: 1, + V1: &xdr.LedgerCloseMetaV1{ + LedgerHeader: xdr.LedgerHeaderHistoryEntry{Header: xdr.LedgerHeader{LedgerVersion: 10}}, + TxSet: xdr.GeneralizedTransactionSet{ + V: 1, + V1TxSet: &xdr.TransactionSetV1{ + PreviousLedgerHash: xdr.Hash{1, 2, 3}, + Phases: []xdr.TransactionPhase{ + { + V0Components: &[]xdr.TxSetComponent{ + { + Type: xdr.TxSetComponentTypeTxsetCompTxsMaybeDiscountedFee, + TxsMaybeDiscountedFee: &xdr.TxSetComponentTxsMaybeDiscountedFee{ + BaseFee: &baseFee, + Txs: []xdr.TransactionEnvelope{ + firstTx, + }, + }, + }, + }, + }, + }, + }, + }, + TxProcessing: []xdr.TransactionResultMeta{ + { + Result: xdr.TransactionResultPair{ + TransactionHash: firstTxHash, + Result: xdr.TransactionResult{ + Result: xdr.TransactionResultResult{ + Results: &[]xdr.OperationResult{}, + }, + }, + }, + FeeProcessing: xdr.LedgerEntryChanges{}, + TxApplyProcessing: xdr.TransactionMeta{ + V: 3, + V3: &xdr.TransactionMetaV3{ + Operations: []xdr.OperationMeta{ + { + Changes: operationChanges, + }, + }, + }, + }, + }, + }, + UpgradesProcessing: []xdr.UpgradeEntryMeta{}, + EvictedTemporaryLedgerKeys: []xdr.LedgerKey{evictedTempLedgerKey}, + EvictedPersistentLedgerEntries: []xdr.LedgerEntry{evictedPersistentLedgerEntry}, + }, + 
} + mockLedgerBackend.On("GetLedger", ctx, sequence). + Return(ledger, nil).Once() + mockLedgerEntryWriter.On("UpsertLedgerEntry", operationChanges[1].MustUpdated()). + Return(nil).Once() + evictedPresistentLedgerKey, err := evictedPersistentLedgerEntry.LedgerKey() + assert.NoError(t, err) + mockLedgerEntryWriter.On("DeleteLedgerEntry", evictedPresistentLedgerKey). + Return(nil).Once() + mockLedgerEntryWriter.On("DeleteLedgerEntry", evictedTempLedgerKey). + Return(nil).Once() + mockLedgerWriter.On("InsertLedger", ledger). + Return(nil).Once() + assert.NoError(t, service.ingest(ctx, sequence)) + + mockDB.AssertExpectations(t) + mockTx.AssertExpectations(t) + mockLedgerEntryWriter.AssertExpectations(t) + mockLedgerWriter.AssertExpectations(t) + mockLedgerBackend.AssertExpectations(t) +} diff --git a/cmd/soroban-rpc/internal/jsonrpc.go b/cmd/soroban-rpc/internal/jsonrpc.go new file mode 100644 index 00000000..4bc4f17c --- /dev/null +++ b/cmd/soroban-rpc/internal/jsonrpc.go @@ -0,0 +1,298 @@ +package internal + +import ( + "context" + "encoding/json" + "net/http" + "strconv" + "strings" + "time" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + "github.com/creachadair/jrpc2/jhttp" + "github.com/go-chi/chi/middleware" + "github.com/prometheus/client_golang/prometheus" + "github.com/rs/cors" + "github.com/stellar/go/support/log" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/events" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/methods" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/network" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/transactions" +) + +// maxHTTPRequestSize defines the largest request size that the http handler +// would be willing to accept before dropping the request. The implementation +// uses the default MaxBytesHandler to limit the request size. +const maxHTTPRequestSize = 512 * 1024 // half a megabyte + +// Handler is the HTTP handler which serves the Soroban JSON RPC responses +type Handler struct { + bridge jhttp.Bridge + logger *log.Entry + http.Handler +} + +// Close closes all the resources held by the Handler instances. +// After Close is called the Handler instance will stop accepting JSON RPC requests. 
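As a usage illustration (a hedged sketch, not part of this change set: serveSketch and addr are hypothetical and error handling is elided), the returned Handler embeds http.Handler, so a daemon can mount it on an HTTP server directly and call Close during shutdown:

package internal

import (
	"net/http"

	"github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config"
)

// serveSketch is illustrative only: it wires the JSON-RPC handler into an
// HTTP server and shuts the bridge down once the server stops.
func serveSketch(cfg *config.Config, params HandlerParams, addr string) error {
	h := NewJSONRPCHandler(cfg, params)
	defer h.Close() // stop accepting JSON-RPC requests when the server exits

	server := &http.Server{Addr: addr, Handler: h}
	return server.ListenAndServe()
}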
+func (h Handler) Close() { + if err := h.bridge.Close(); err != nil { + h.logger.WithError(err).Warn("could not close bridge") + } +} + +type HandlerParams struct { + EventStore *events.MemoryStore + TransactionStore *transactions.MemoryStore + LedgerEntryReader db.LedgerEntryReader + LedgerReader db.LedgerReader + Logger *log.Entry + PreflightGetter methods.PreflightGetter + Daemon interfaces.Daemon +} + +func decorateHandlers(daemon interfaces.Daemon, logger *log.Entry, m handler.Map) handler.Map { + requestMetric := prometheus.NewSummaryVec(prometheus.SummaryOpts{ + Namespace: daemon.MetricsNamespace(), + Subsystem: "json_rpc", + Name: "request_duration_seconds", + Help: "JSON RPC request duration", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }, []string{"endpoint", "status"}) + decorated := handler.Map{} + for endpoint, h := range m { + // create copy of h so it can be used in closure bleow + h := h + decorated[endpoint] = handler.New(func(ctx context.Context, r *jrpc2.Request) (interface{}, error) { + reqID := strconv.FormatUint(middleware.NextRequestID(), 10) + logRequest(logger, reqID, r) + startTime := time.Now() + result, err := h(ctx, r) + duration := time.Since(startTime) + label := prometheus.Labels{"endpoint": r.Method(), "status": "ok"} + simulateTransactionResponse, ok := result.(methods.SimulateTransactionResponse) + if ok && simulateTransactionResponse.Error != "" { + label["status"] = "error" + } else if err != nil { + if jsonRPCErr, ok := err.(*jrpc2.Error); ok { + prometheusLabelReplacer := strings.NewReplacer(" ", "_", "-", "_", "(", "", ")", "") + status := prometheusLabelReplacer.Replace(jsonRPCErr.Code.String()) + label["status"] = status + } + } + requestMetric.With(label).Observe(duration.Seconds()) + logResponse(logger, reqID, duration, label["status"], result) + return result, err + }) + } + daemon.MetricsRegistry().MustRegister(requestMetric) + return decorated +} + +func logRequest(logger *log.Entry, reqID string, req *jrpc2.Request) { + logger = logger.WithFields(log.F{ + "subsys": "jsonrpc", + "req": reqID, + "json_req": req.ID(), + "method": req.Method(), + }) + logger.Info("starting JSONRPC request") + + // Params are useful but can be really verbose, let's only print them in debug level + logger = logger.WithField("params", req.ParamString()) + logger.Debug("starting JSONRPC request params") +} + +func logResponse(logger *log.Entry, reqID string, duration time.Duration, status string, response any) { + logger = logger.WithFields(log.F{ + "subsys": "jsonrpc", + "req": reqID, + "duration": duration.String(), + "json_req": reqID, + "status": status, + }) + logger.Info("finished JSONRPC request") + + if status == "ok" { + responseBytes, err := json.Marshal(response) + if err == nil { + // the result is useful but can be really verbose, let's only print it with debug level + logger = logger.WithField("result", string(responseBytes)) + logger.Debug("finished JSONRPC request result") + } + } +} + +// NewJSONRPCHandler constructs a Handler instance +func NewJSONRPCHandler(cfg *config.Config, params HandlerParams) Handler { + bridgeOptions := jhttp.BridgeOptions{ + Server: &jrpc2.ServerOptions{ + Logger: func(text string) { params.Logger.Debug(text) }, + }, + } + handlers := []struct { + methodName string + underlyingHandler jrpc2.Handler + queueLimit uint + longName string + requestDurationLimit time.Duration + }{ + { + methodName: "getHealth", + underlyingHandler: methods.NewHealthCheck(params.TransactionStore, 
cfg.MaxHealthyLedgerLatency), + longName: "get_health", + queueLimit: cfg.RequestBacklogGetHealthQueueLimit, + requestDurationLimit: cfg.MaxGetHealthExecutionDuration, + }, + { + methodName: "getEvents", + underlyingHandler: methods.NewGetEventsHandler(params.EventStore, cfg.MaxEventsLimit, cfg.DefaultEventsLimit), + longName: "get_events", + queueLimit: cfg.RequestBacklogGetEventsQueueLimit, + requestDurationLimit: cfg.MaxGetEventsExecutionDuration, + }, + { + methodName: "getNetwork", + underlyingHandler: methods.NewGetNetworkHandler(params.Daemon, cfg.NetworkPassphrase, cfg.FriendbotURL), + longName: "get_network", + queueLimit: cfg.RequestBacklogGetNetworkQueueLimit, + requestDurationLimit: cfg.MaxGetNetworkExecutionDuration, + }, + { + methodName: "getLatestLedger", + underlyingHandler: methods.NewGetLatestLedgerHandler(params.LedgerEntryReader, params.LedgerReader), + longName: "get_latest_ledger", + queueLimit: cfg.RequestBacklogGetLatestLedgerQueueLimit, + requestDurationLimit: cfg.MaxGetLatestLedgerExecutionDuration, + }, + { + methodName: "getLedgerEntry", + underlyingHandler: methods.NewGetLedgerEntryHandler(params.Logger, params.LedgerEntryReader), + longName: "get_ledger_entry", + queueLimit: cfg.RequestBacklogGetLedgerEntriesQueueLimit, // share with getLedgerEntries + requestDurationLimit: cfg.MaxGetLedgerEntriesExecutionDuration, + }, + { + methodName: "getLedgerEntries", + underlyingHandler: methods.NewGetLedgerEntriesHandler(params.Logger, params.LedgerEntryReader), + longName: "get_ledger_entries", + queueLimit: cfg.RequestBacklogGetLedgerEntriesQueueLimit, + requestDurationLimit: cfg.MaxGetLedgerEntriesExecutionDuration, + }, + { + methodName: "getTransaction", + underlyingHandler: methods.NewGetTransactionHandler(params.TransactionStore), + longName: "get_transaction", + queueLimit: cfg.RequestBacklogGetTransactionQueueLimit, + requestDurationLimit: cfg.MaxGetTransactionExecutionDuration, + }, + { + methodName: "sendTransaction", + underlyingHandler: methods.NewSendTransactionHandler(params.Daemon, params.Logger, params.TransactionStore, cfg.NetworkPassphrase), + longName: "send_transaction", + queueLimit: cfg.RequestBacklogSendTransactionQueueLimit, + requestDurationLimit: cfg.MaxSendTransactionExecutionDuration, + }, + { + methodName: "simulateTransaction", + underlyingHandler: methods.NewSimulateTransactionHandler(params.Logger, params.LedgerEntryReader, params.LedgerReader, params.PreflightGetter), + longName: "simulate_transaction", + queueLimit: cfg.RequestBacklogSimulateTransactionQueueLimit, + requestDurationLimit: cfg.MaxSimulateTransactionExecutionDuration, + }, + } + handlersMap := handler.Map{} + for _, handler := range handlers { + queueLimiterGaugeName := handler.longName + "_inflight_requests" + queueLimiterGaugeHelp := "Number of concurrenty in-flight " + handler.methodName + " requests" + + queueLimiterGauge := prometheus.NewGauge(prometheus.GaugeOpts{ + Namespace: params.Daemon.MetricsNamespace(), Subsystem: "network", + Name: queueLimiterGaugeName, + Help: queueLimiterGaugeHelp, + }) + queueLimiter := network.MakeJrpcBacklogQueueLimiter( + handler.underlyingHandler, + queueLimiterGauge, + uint64(handler.queueLimit), + params.Logger) + + durationWarnCounterName := handler.longName + "_execution_threshold_warning" + durationLimitCounterName := handler.longName + "_execution_threshold_limit" + durationWarnCounterHelp := "The metric measures the count of " + handler.methodName + " requests that surpassed the warning threshold for execution time" + 
durationLimitCounterHelp := "The metric measures the count of " + handler.methodName + " requests that surpassed the limit threshold for execution time" + + requestDurationWarnCounter := prometheus.NewCounter(prometheus.CounterOpts{ + Namespace: params.Daemon.MetricsNamespace(), Subsystem: "network", + Name: durationWarnCounterName, + Help: durationWarnCounterHelp, + }) + requestDurationLimitCounter := prometheus.NewCounter(prometheus.CounterOpts{ + Namespace: params.Daemon.MetricsNamespace(), Subsystem: "network", + Name: durationLimitCounterName, + Help: durationLimitCounterHelp, + }) + // set the warning threshold to be one third of the limit. + requestDurationWarn := handler.requestDurationLimit / 3 + durationLimiter := network.MakeJrpcRequestDurationLimiter( + queueLimiter.Handle, + requestDurationWarn, + handler.requestDurationLimit, + requestDurationWarnCounter, + requestDurationLimitCounter, + params.Logger) + handlersMap[handler.methodName] = durationLimiter.Handle + } + bridge := jhttp.NewBridge(decorateHandlers( + params.Daemon, + params.Logger, + handlersMap), + &bridgeOptions) + + // globalQueueRequestBacklogLimiter is a metric for measuring the total concurrent inflight requests + globalQueueRequestBacklogLimiter := prometheus.NewGauge(prometheus.GaugeOpts{ + Namespace: params.Daemon.MetricsNamespace(), Subsystem: "network", Name: "global_inflight_requests", + Help: "Number of concurrenty in-flight http requests", + }) + + queueLimitedBridge := network.MakeHTTPBacklogQueueLimiter( + bridge, + globalQueueRequestBacklogLimiter, + uint64(cfg.RequestBacklogGlobalQueueLimit), + params.Logger) + + globalQueueRequestExecutionDurationWarningCounter := prometheus.NewCounter(prometheus.CounterOpts{ + Namespace: params.Daemon.MetricsNamespace(), Subsystem: "network", Name: "global_request_execution_duration_threshold_warning", + Help: "The metric measures the count of requests that surpassed the warning threshold for execution time", + }) + globalQueueRequestExecutionDurationLimitCounter := prometheus.NewCounter(prometheus.CounterOpts{ + Namespace: params.Daemon.MetricsNamespace(), Subsystem: "network", Name: "global_request_execution_duration_threshold_limit", + Help: "The metric measures the count of requests that surpassed the limit threshold for execution time", + }) + var handler http.Handler = network.MakeHTTPRequestDurationLimiter( + queueLimitedBridge, + cfg.RequestExecutionWarningThreshold, + cfg.MaxRequestExecutionDuration, + globalQueueRequestExecutionDurationWarningCounter, + globalQueueRequestExecutionDurationLimitCounter, + params.Logger) + + handler = http.MaxBytesHandler(handler, maxHTTPRequestSize) + + corsMiddleware := cors.New(cors.Options{ + AllowedOrigins: []string{}, + AllowOriginRequestFunc: func(*http.Request, string) bool { return true }, + AllowedHeaders: []string{"*"}, + AllowedMethods: []string{"GET", "PUT", "POST", "PATCH", "DELETE", "HEAD", "OPTIONS"}, + }) + + return Handler{ + bridge: bridge, + logger: params.Logger, + Handler: corsMiddleware.Handler(handler), + } +} diff --git a/cmd/soroban-rpc/internal/ledgerbucketwindow/ledgerbucketwindow.go b/cmd/soroban-rpc/internal/ledgerbucketwindow/ledgerbucketwindow.go new file mode 100644 index 00000000..0d447e71 --- /dev/null +++ b/cmd/soroban-rpc/internal/ledgerbucketwindow/ledgerbucketwindow.go @@ -0,0 +1,76 @@ +package ledgerbucketwindow + +import ( + "fmt" +) + +// LedgerBucketWindow is a sequence of buckets associated to a ledger window. 
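+// +// Illustrative sketch of the eviction behaviour (example values only, not part of this change), using a retention window of 2: +// +// w := NewLedgerBucketWindow[string](2) +// w.Append(LedgerBucket[string]{LedgerSeq: 10, BucketContent: "a"}) // returns nil +// w.Append(LedgerBucket[string]{LedgerSeq: 11, BucketContent: "b"}) // returns nil +// w.Append(LedgerBucket[string]{LedgerSeq: 12, BucketContent: "c"}) // returns the evicted ledger-10 bucket +// w.Get(0) // now the ledger-11 bucket, the oldest one kept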
+type LedgerBucketWindow[T any] struct { + // buckets is a circular buffer where each cell represents + // the content stored for a specific ledger. + buckets []LedgerBucket[T] + // start is the index of the head in the circular buffer. + start uint32 +} + +// LedgerBucket holds the content associated to a ledger +type LedgerBucket[T any] struct { + LedgerSeq uint32 + LedgerCloseTimestamp int64 + BucketContent T +} + +// DefaultEventLedgerRetentionWindow represents the max number of ledgers we would like to keep +// an incoming event in memory. The value was calculated to align with (roughly) 24 hours window. +const DefaultEventLedgerRetentionWindow = 17280 + +// NewLedgerBucketWindow creates a new LedgerBucketWindow +func NewLedgerBucketWindow[T any](retentionWindow uint32) *LedgerBucketWindow[T] { + if retentionWindow == 0 { + retentionWindow = DefaultEventLedgerRetentionWindow + } + return &LedgerBucketWindow[T]{ + buckets: make([]LedgerBucket[T], 0, retentionWindow), + } +} + +// Append adds a new bucket to the window. If the window is full a bucket will be evicted and returned. +func (w *LedgerBucketWindow[T]) Append(bucket LedgerBucket[T]) *LedgerBucket[T] { + length := w.Len() + if length > 0 { + expectedLedgerSequence := w.buckets[w.start].LedgerSeq + length + if expectedLedgerSequence != bucket.LedgerSeq { + panic(fmt.Errorf("ledgers not contiguous: expected ledger sequence %v but received %v", expectedLedgerSequence, bucket.LedgerSeq)) + } + } + + var evicted *LedgerBucket[T] + if length < uint32(cap(w.buckets)) { + // The buffer isn't full, just place the bucket at the end + w.buckets = append(w.buckets, bucket) + } else { + // overwrite the first bucket and shift the circular buffer so that it + // becomes the last bucket + saved := w.buckets[w.start] + evicted = &saved + w.buckets[w.start] = bucket + w.start = (w.start + 1) % length + } + + return evicted +} + +// Len returns the length (number of buckets in the window) +func (w *LedgerBucketWindow[T]) Len() uint32 { + return uint32(len(w.buckets)) +} + +// Get obtains a bucket from the window +func (w *LedgerBucketWindow[T]) Get(i uint32) *LedgerBucket[T] { + length := w.Len() + if i >= length { + panic(fmt.Errorf("index out of range [%d] with length %d", i, length)) + } + index := (w.start + i) % length + return &w.buckets[index] +} diff --git a/cmd/soroban-rpc/internal/ledgerbucketwindow/ledgerbucketwindow_test.go b/cmd/soroban-rpc/internal/ledgerbucketwindow/ledgerbucketwindow_test.go new file mode 100644 index 00000000..b472af0b --- /dev/null +++ b/cmd/soroban-rpc/internal/ledgerbucketwindow/ledgerbucketwindow_test.go @@ -0,0 +1,135 @@ +package ledgerbucketwindow + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func bucket(ledgerSeq uint32) LedgerBucket[uint32] { + return LedgerBucket[uint32]{ + LedgerSeq: ledgerSeq, + LedgerCloseTimestamp: int64(ledgerSeq)*25 + 100, + BucketContent: ledgerSeq, + } +} + +func TestAppend(t *testing.T) { + m := NewLedgerBucketWindow[uint32](3) + require.Equal(t, uint32(0), m.Len()) + + // test appending first bucket of events + evicted := m.Append(bucket(5)) + require.Nil(t, evicted) + require.Equal(t, uint32(1), m.Len()) + require.Equal(t, bucket(5), *m.Get(0)) + + // the next bucket must follow the previous bucket (ledger 5) + + require.PanicsWithError( + t, "ledgers not contiguous: expected ledger sequence 6 but received 10", + func() { + m.Append(LedgerBucket[uint32]{ + LedgerSeq: 10, + LedgerCloseTimestamp: 100, + BucketContent: 10, + }) + }, + ) + 
require.PanicsWithError( + t, "ledgers not contiguous: expected ledger sequence 6 but received 4", + func() { + m.Append(LedgerBucket[uint32]{ + LedgerSeq: 4, + LedgerCloseTimestamp: 100, + BucketContent: 4, + }) + }, + ) + require.PanicsWithError( + t, "ledgers not contiguous: expected ledger sequence 6 but received 5", + func() { + m.Append(LedgerBucket[uint32]{ + LedgerSeq: 5, + LedgerCloseTimestamp: 100, + BucketContent: 5, + }) + }, + ) + // check that none of the calls above modified our buckets + require.Equal(t, uint32(1), m.Len()) + require.Equal(t, bucket(5), *m.Get(0)) + + // append ledger 6 bucket, now we have two buckets filled + evicted = m.Append(bucket(6)) + require.Nil(t, evicted) + require.Equal(t, uint32(2), m.Len()) + require.Equal(t, bucket(5), *m.Get(0)) + require.Equal(t, bucket(6), *m.Get(1)) + + // the next bucket of events must follow the previous bucket (ledger 6) + require.PanicsWithError( + t, "ledgers not contiguous: expected ledger sequence 7 but received 10", + func() { + m.Append(LedgerBucket[uint32]{ + LedgerSeq: 10, + LedgerCloseTimestamp: 100, + BucketContent: 10, + }) + }, + ) + require.PanicsWithError( + t, "ledgers not contiguous: expected ledger sequence 7 but received 4", + func() { + m.Append(LedgerBucket[uint32]{ + LedgerSeq: 4, + LedgerCloseTimestamp: 100, + BucketContent: 4, + }) + }, + ) + require.PanicsWithError( + t, "ledgers not contiguous: expected ledger sequence 7 but received 5", + func() { + m.Append(LedgerBucket[uint32]{ + LedgerSeq: 5, + LedgerCloseTimestamp: 100, + BucketContent: 5, + }) + }, + ) + + // append ledger 7, now we have all three buckets filled + evicted = m.Append(bucket(7)) + require.Nil(t, evicted) + require.Nil(t, evicted) + require.Equal(t, uint32(3), m.Len()) + require.Equal(t, bucket(5), *m.Get(0)) + require.Equal(t, bucket(6), *m.Get(1)) + require.Equal(t, bucket(7), *m.Get(2)) + + // append ledger 8, but all buckets are full, so we need to evict ledger 5 + evicted = m.Append(bucket(8)) + require.Equal(t, bucket(5), *evicted) + require.Equal(t, uint32(3), m.Len()) + require.Equal(t, bucket(6), *m.Get(0)) + require.Equal(t, bucket(7), *m.Get(1)) + require.Equal(t, bucket(8), *m.Get(2)) + + // append ledger 9 events, but all buckets are full, so we need to evict ledger 6 + evicted = m.Append(bucket(9)) + require.Equal(t, bucket(6), *evicted) + require.Equal(t, uint32(3), m.Len()) + require.Equal(t, bucket(7), *m.Get(0)) + require.Equal(t, bucket(8), *m.Get(1)) + require.Equal(t, bucket(9), *m.Get(2)) + + // append ledger 10, but all buckets are full, so we need to evict ledger 7. 
+ // The start index must have wrapped around + evicted = m.Append(bucket(10)) + require.Equal(t, bucket(7), *evicted) + require.Equal(t, uint32(3), m.Len()) + require.Equal(t, bucket(8), *m.Get(0)) + require.Equal(t, bucket(9), *m.Get(1)) + require.Equal(t, bucket(10), *m.Get(2)) +} diff --git a/cmd/soroban-rpc/internal/methods/get_events.go b/cmd/soroban-rpc/internal/methods/get_events.go new file mode 100644 index 00000000..e5bf3628 --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/get_events.go @@ -0,0 +1,431 @@ +package methods + +import ( + "context" + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + + "github.com/stellar/go/strkey" + "github.com/stellar/go/support/errors" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/events" +) + +type eventTypeSet map[string]interface{} + +func (e eventTypeSet) valid() error { + for key := range e { + switch key { + case EventTypeSystem, EventTypeContract, EventTypeDiagnostic: + // ok + default: + return errors.New("if set, type must be either 'system', 'contract' or 'diagnostic'") + } + } + return nil +} + +func (e *eventTypeSet) UnmarshalJSON(data []byte) error { + if len(data) == 0 { + *e = map[string]interface{}{} + return nil + } + var joined string + if err := json.Unmarshal(data, &joined); err != nil { + return err + } + *e = map[string]interface{}{} + if len(joined) == 0 { + return nil + } + for _, key := range strings.Split(joined, ",") { + (*e)[key] = nil + } + return nil +} + +func (e eventTypeSet) MarshalJSON() ([]byte, error) { + var keys []string + for key := range e { + keys = append(keys, key) + } + return json.Marshal(strings.Join(keys, ",")) +} + +func (e eventTypeSet) matches(event xdr.ContractEvent) bool { + if len(e) == 0 { + return true + } + _, ok := e[eventTypeFromXDR[event.Type]] + return ok +} + +type EventInfo struct { + EventType string `json:"type"` + Ledger int32 `json:"ledger"` + LedgerClosedAt string `json:"ledgerClosedAt"` + ContractID string `json:"contractId"` + ID string `json:"id"` + PagingToken string `json:"pagingToken"` + Topic []string `json:"topic"` + Value string `json:"value"` + InSuccessfulContractCall bool `json:"inSuccessfulContractCall"` +} + +type GetEventsRequest struct { + StartLedger uint32 `json:"startLedger,omitempty"` + Filters []EventFilter `json:"filters"` + Pagination *PaginationOptions `json:"pagination,omitempty"` +} + +func (g *GetEventsRequest) Valid(maxLimit uint) error { + // Validate start + // Validate the paging limit (if it exists) + if g.Pagination != nil && g.Pagination.Cursor != nil { + if g.StartLedger != 0 { + return errors.New("startLedger and cursor cannot both be set") + } + } else if g.StartLedger <= 0 { + return errors.New("startLedger must be positive") + } + if g.Pagination != nil && g.Pagination.Limit > maxLimit { + return fmt.Errorf("limit must not exceed %d", maxLimit) + } + + // Validate filters + if len(g.Filters) > 5 { + return errors.New("maximum 5 filters per request") + } + for i, filter := range g.Filters { + if err := filter.Valid(); err != nil { + return errors.Wrapf(err, "filter %d invalid", i+1) + } + } + + return nil +} + +func (g *GetEventsRequest) Matches(event xdr.DiagnosticEvent) bool { + if len(g.Filters) == 0 { + return true + } + for _, filter := range g.Filters { + if filter.Matches(event) { + return true + } + } + return false +} + +const EventTypeSystem = "system" +const EventTypeContract = "contract" +const 
EventTypeDiagnostic = "diagnostic" + +var eventTypeFromXDR = map[xdr.ContractEventType]string{ + xdr.ContractEventTypeSystem: EventTypeSystem, + xdr.ContractEventTypeContract: EventTypeContract, + xdr.ContractEventTypeDiagnostic: EventTypeDiagnostic, +} + +type EventFilter struct { + EventType eventTypeSet `json:"type,omitempty"` + ContractIDs []string `json:"contractIds,omitempty"` + Topics []TopicFilter `json:"topics,omitempty"` +} + +func (e *EventFilter) Valid() error { + if err := e.EventType.valid(); err != nil { + return errors.Wrap(err, "filter type invalid") + } + if len(e.ContractIDs) > 5 { + return errors.New("maximum 5 contract IDs per filter") + } + if len(e.Topics) > 5 { + return errors.New("maximum 5 topics per filter") + } + for i, id := range e.ContractIDs { + _, err := strkey.Decode(strkey.VersionByteContract, id) + if err != nil { + return fmt.Errorf("contract ID %d invalid", i+1) + } + } + for i, topic := range e.Topics { + if err := topic.Valid(); err != nil { + return errors.Wrapf(err, "topic %d invalid", i+1) + } + } + return nil +} + +func (e *EventFilter) Matches(event xdr.DiagnosticEvent) bool { + return e.EventType.matches(event.Event) && e.matchesContractIDs(event.Event) && e.matchesTopics(event.Event) +} + +func (e *EventFilter) matchesContractIDs(event xdr.ContractEvent) bool { + if len(e.ContractIDs) == 0 { + return true + } + if event.ContractId == nil { + return false + } + needle := strkey.MustEncode(strkey.VersionByteContract, (*event.ContractId)[:]) + for _, id := range e.ContractIDs { + if id == needle { + return true + } + } + return false +} + +func (e *EventFilter) matchesTopics(event xdr.ContractEvent) bool { + if len(e.Topics) == 0 { + return true + } + v0, ok := event.Body.GetV0() + if !ok { + return false + } + for _, topicFilter := range e.Topics { + if topicFilter.Matches(v0.Topics) { + return true + } + } + return false +} + +type TopicFilter []SegmentFilter + +const minTopicCount = 1 +const maxTopicCount = 4 + +func (t *TopicFilter) Valid() error { + if len(*t) < minTopicCount { + return errors.New("topic must have at least one segment") + } + if len(*t) > maxTopicCount { + return errors.New("topic cannot have more than 4 segments") + } + for i, segment := range *t { + if err := segment.Valid(); err != nil { + return errors.Wrapf(err, "segment %d invalid", i+1) + } + } + return nil +} + +// An event matches a topic filter iff: +// - the event has EXACTLY as many topic segments as the filter AND +// - each segment either: matches exactly OR is a wildcard. 
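+// +// Illustrative sketch (transferVal and mintVal stand for arbitrary xdr.ScVal symbols; they are examples, not part of this change): +// +// star := "*" +// f := TopicFilter{{wildcard: &star}, {scval: &transferVal}} +// f.Matches([]xdr.ScVal{mintVal, transferVal}) // true: two segments, second matches exactly +// f.Matches([]xdr.ScVal{transferVal}) // false: segment counts differ +// f.Matches([]xdr.ScVal{transferVal, mintVal}) // false: second segment does not match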
+func (t TopicFilter) Matches(event []xdr.ScVal) bool { + if len(event) != len(t) { + return false + } + + for i, segmentFilter := range t { + if !segmentFilter.Matches(event[i]) { + return false + } + } + + return true +} + +type SegmentFilter struct { + wildcard *string + scval *xdr.ScVal +} + +func (s *SegmentFilter) Matches(segment xdr.ScVal) bool { + if s.wildcard != nil && *s.wildcard == "*" { + return true + } else if s.scval != nil { + if !s.scval.Equals(segment) { + return false + } + } else { + panic("invalid segmentFilter") + } + + return true +} + +func (s *SegmentFilter) Valid() error { + if s.wildcard != nil && s.scval != nil { + return errors.New("cannot set both wildcard and scval") + } + if s.wildcard == nil && s.scval == nil { + return errors.New("must set either wildcard or scval") + } + if s.wildcard != nil && *s.wildcard != "*" { + return errors.New("wildcard must be '*'") + } + return nil +} + +func (s *SegmentFilter) UnmarshalJSON(p []byte) error { + s.wildcard = nil + s.scval = nil + + var tmp string + if err := json.Unmarshal(p, &tmp); err != nil { + return err + } + if tmp == "*" { + s.wildcard = &tmp + } else { + var out xdr.ScVal + if err := xdr.SafeUnmarshalBase64(tmp, &out); err != nil { + return err + } + s.scval = &out + } + return nil +} + +type PaginationOptions struct { + Cursor *events.Cursor `json:"cursor,omitempty"` + Limit uint `json:"limit,omitempty"` +} + +type GetEventsResponse struct { + Events []EventInfo `json:"events"` + LatestLedger int64 `json:"latestLedger"` +} + +type eventScanner interface { + Scan(eventRange events.Range, f func(xdr.DiagnosticEvent, events.Cursor, int64) bool) (uint32, error) +} + +type eventsRPCHandler struct { + scanner eventScanner + maxLimit uint + defaultLimit uint +} + +func (h eventsRPCHandler) getEvents(request GetEventsRequest) (GetEventsResponse, error) { + if err := request.Valid(h.maxLimit); err != nil { + return GetEventsResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: err.Error(), + } + } + + start := events.Cursor{Ledger: uint32(request.StartLedger)} + limit := h.defaultLimit + if request.Pagination != nil { + if request.Pagination.Cursor != nil { + start = *request.Pagination.Cursor + // increment event index because, when paginating, + // we start with the item right after the cursor + start.Event++ + } + if request.Pagination.Limit > 0 { + limit = request.Pagination.Limit + } + } + + type entry struct { + cursor events.Cursor + ledgerCloseTimestamp int64 + event xdr.DiagnosticEvent + } + var found []entry + latestLedger, err := h.scanner.Scan( + events.Range{ + Start: start, + ClampStart: false, + End: events.MaxCursor, + ClampEnd: true, + }, + func(event xdr.DiagnosticEvent, cursor events.Cursor, ledgerCloseTimestamp int64) bool { + if request.Matches(event) { + found = append(found, entry{cursor, ledgerCloseTimestamp, event}) + } + return uint(len(found)) < limit + }, + ) + if err != nil { + return GetEventsResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidRequest, + Message: err.Error(), + } + } + + results := []EventInfo{} + for _, entry := range found { + info, err := eventInfoForEvent( + entry.event, + entry.cursor, + time.Unix(entry.ledgerCloseTimestamp, 0).UTC().Format(time.RFC3339), + ) + if err != nil { + return GetEventsResponse{}, errors.Wrap(err, "could not parse event") + } + results = append(results, info) + } + return GetEventsResponse{ + LatestLedger: int64(latestLedger), + Events: results, + }, nil +} + +func eventInfoForEvent(event xdr.DiagnosticEvent, cursor 
events.Cursor, ledgerClosedAt string) (EventInfo, error) { + v0, ok := event.Event.Body.GetV0() + if !ok { + return EventInfo{}, errors.New("unknown event version") + } + + eventType, ok := eventTypeFromXDR[event.Event.Type] + if !ok { + return EventInfo{}, fmt.Errorf("unknown XDR ContractEventType type: %d", event.Event.Type) + } + + // base64-xdr encode the topic + topic := make([]string, 0, 4) + for _, segment := range v0.Topics { + seg, err := xdr.MarshalBase64(segment) + if err != nil { + return EventInfo{}, err + } + topic = append(topic, seg) + } + + // base64-xdr encode the data + data, err := xdr.MarshalBase64(v0.Data) + if err != nil { + return EventInfo{}, err + } + + info := EventInfo{ + EventType: eventType, + Ledger: int32(cursor.Ledger), + LedgerClosedAt: ledgerClosedAt, + ID: cursor.String(), + PagingToken: cursor.String(), + Topic: topic, + Value: data, + InSuccessfulContractCall: event.InSuccessfulContractCall, + } + if event.Event.ContractId != nil { + info.ContractID = strkey.MustEncode(strkey.VersionByteContract, (*event.Event.ContractId)[:]) + } + return info, nil +} + +// NewGetEventsHandler returns a json rpc handler to fetch and filter events +func NewGetEventsHandler(eventsStore *events.MemoryStore, maxLimit, defaultLimit uint) jrpc2.Handler { + eventsHandler := eventsRPCHandler{ + scanner: eventsStore, + maxLimit: maxLimit, + defaultLimit: defaultLimit, + } + return handler.New(func(ctx context.Context, request GetEventsRequest) (GetEventsResponse, error) { + return eventsHandler.getEvents(request) + }) +} diff --git a/cmd/soroban-rpc/internal/methods/get_events_test.go b/cmd/soroban-rpc/internal/methods/get_events_test.go new file mode 100644 index 00000000..4d15e2c0 --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/get_events_test.go @@ -0,0 +1,1189 @@ +package methods + +import ( + "encoding/json" + "fmt" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/stellar/go/keypair" + "github.com/stellar/go/network" + "github.com/stellar/go/strkey" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/events" +) + +func TestEventTypeSetMatches(t *testing.T) { + var defaultSet eventTypeSet + + all := eventTypeSet{} + all[EventTypeContract] = nil + all[EventTypeDiagnostic] = nil + all[EventTypeSystem] = nil + + onlyContract := eventTypeSet{} + onlyContract[EventTypeContract] = nil + + contractEvent := xdr.ContractEvent{Type: xdr.ContractEventTypeContract} + diagnosticEvent := xdr.ContractEvent{Type: xdr.ContractEventTypeDiagnostic} + systemEvent := xdr.ContractEvent{Type: xdr.ContractEventTypeSystem} + + for _, testCase := range []struct { + name string + set eventTypeSet + event xdr.ContractEvent + matches bool + }{ + { + "all matches Contract events", + all, + contractEvent, + true, + }, + { + "all matches System events", + all, + systemEvent, + true, + }, + { + "all matches Diagnostic events", + all, + systemEvent, + true, + }, + { + "defaultSet matches Contract events", + defaultSet, + contractEvent, + true, + }, + { + "defaultSet matches System events", + defaultSet, + systemEvent, + true, + }, + { + "defaultSet matches Diagnostic events", + defaultSet, + systemEvent, + true, + }, + { + "onlyContract set matches Contract events", + onlyContract, + contractEvent, + true, + }, + { + "onlyContract does not match System events", + onlyContract, + systemEvent, + false, + }, + { + "onlyContract does not match 
Diagnostic events", + defaultSet, + diagnosticEvent, + true, + }, + } { + t.Run(testCase.name, func(t *testing.T) { + assert.Equal(t, testCase.matches, testCase.set.matches(testCase.event)) + }) + } +} + +func TestEventTypeSetValid(t *testing.T) { + for _, testCase := range []struct { + name string + keys []string + expectedError bool + }{ + { + "empty set", + []string{}, + false, + }, + { + "set with one valid element", + []string{EventTypeSystem}, + false, + }, + { + "set with two valid elements", + []string{EventTypeSystem, EventTypeContract}, + false, + }, + { + "set with three valid elements", + []string{EventTypeSystem, EventTypeContract, EventTypeDiagnostic}, + false, + }, + { + "set with one invalid element", + []string{"abc"}, + true, + }, + { + "set with multiple invalid elements", + []string{"abc", "def"}, + true, + }, + { + "set with valid elements mixed with invalid elements", + []string{EventTypeSystem, "abc"}, + true, + }, + } { + t.Run(testCase.name, func(t *testing.T) { + set := eventTypeSet{} + for _, key := range testCase.keys { + set[key] = nil + } + if testCase.expectedError { + assert.Error(t, set.valid()) + } else { + assert.NoError(t, set.valid()) + } + }) + } +} + +func TestEventTypeSetMarshaling(t *testing.T) { + for _, testCase := range []struct { + name string + input string + expected []string + }{ + { + "empty set", + "", + []string{}, + }, + { + "set with one element", + "a", + []string{"a"}, + }, + { + "set with more than one element", + "a,b,c", + []string{"a", "b", "c"}, + }, + } { + t.Run(testCase.name, func(t *testing.T) { + var set eventTypeSet + input, err := json.Marshal(testCase.input) + assert.NoError(t, err) + err = set.UnmarshalJSON(input) + assert.NoError(t, err) + assert.Equal(t, len(testCase.expected), len(set)) + for _, val := range testCase.expected { + _, ok := set[val] + assert.True(t, ok) + } + }) + } +} + +func TestTopicFilterMatches(t *testing.T) { + transferSym := xdr.ScSymbol("transfer") + transfer := xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &transferSym, + } + sixtyfour := xdr.Uint64(64) + number := xdr.ScVal{ + Type: xdr.ScValTypeScvU64, + U64: &sixtyfour, + } + star := "*" + for _, tc := range []struct { + name string + filter TopicFilter + includes []xdr.ScVec + excludes []xdr.ScVec + }{ + { + name: "", + filter: nil, + includes: []xdr.ScVec{ + {}, + }, + excludes: []xdr.ScVec{ + {transfer}, + }, + }, + + // Exact matching + { + name: "ScSymbol(transfer)", + filter: []SegmentFilter{ + {scval: &transfer}, + }, + includes: []xdr.ScVec{ + {transfer}, + }, + excludes: []xdr.ScVec{ + {number}, + {transfer, transfer}, + }, + }, + + // Star + { + name: "*", + filter: []SegmentFilter{ + {wildcard: &star}, + }, + includes: []xdr.ScVec{ + {transfer}, + }, + excludes: []xdr.ScVec{ + {transfer, transfer}, + }, + }, + { + name: "*/transfer", + filter: []SegmentFilter{ + {wildcard: &star}, + {scval: &transfer}, + }, + includes: []xdr.ScVec{ + {number, transfer}, + {transfer, transfer}, + }, + excludes: []xdr.ScVec{ + {number}, + {number, number}, + {number, transfer, number}, + {transfer}, + {transfer, number}, + {transfer, transfer, transfer}, + }, + }, + { + name: "transfer/*", + filter: []SegmentFilter{ + {scval: &transfer}, + {wildcard: &star}, + }, + includes: []xdr.ScVec{ + {transfer, number}, + {transfer, transfer}, + }, + excludes: []xdr.ScVec{ + {number}, + {number, number}, + {number, transfer, number}, + {transfer}, + {number, transfer}, + {transfer, transfer, transfer}, + }, + }, + { + name: "transfer/*/*", + filter: 
[]SegmentFilter{ + {scval: &transfer}, + {wildcard: &star}, + {wildcard: &star}, + }, + includes: []xdr.ScVec{ + {transfer, number, number}, + {transfer, transfer, transfer}, + }, + excludes: []xdr.ScVec{ + {number}, + {number, number}, + {number, transfer}, + {number, transfer, number, number}, + {transfer}, + {transfer, transfer, transfer, transfer}, + }, + }, + { + name: "transfer/*/number", + filter: []SegmentFilter{ + {scval: &transfer}, + {wildcard: &star}, + {scval: &number}, + }, + includes: []xdr.ScVec{ + {transfer, number, number}, + {transfer, transfer, number}, + }, + excludes: []xdr.ScVec{ + {number}, + {number, number}, + {number, number, number}, + {number, transfer, number}, + {transfer}, + {number, transfer}, + {transfer, transfer, transfer}, + {transfer, number, transfer}, + }, + }, + } { + name := tc.name + if name == "" { + name = topicFilterToString(tc.filter) + } + t.Run(name, func(t *testing.T) { + for _, include := range tc.includes { + assert.True( + t, + tc.filter.Matches(include), + "Expected %v filter to include %v", + name, + include, + ) + } + for _, exclude := range tc.excludes { + assert.False( + t, + tc.filter.Matches(exclude), + "Expected %v filter to exclude %v", + name, + exclude, + ) + } + }) + } +} + +func TestTopicFilterJSON(t *testing.T) { + var got TopicFilter + + assert.NoError(t, json.Unmarshal([]byte("[]"), &got)) + assert.Equal(t, TopicFilter{}, got) + + star := "*" + assert.NoError(t, json.Unmarshal([]byte("[\"*\"]"), &got)) + assert.Equal(t, TopicFilter{{wildcard: &star}}, got) + + sixtyfour := xdr.Uint64(64) + scval := xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &sixtyfour} + scvalstr, err := xdr.MarshalBase64(scval) + assert.NoError(t, err) + assert.NoError(t, json.Unmarshal([]byte(fmt.Sprintf("[%q]", scvalstr)), &got)) + assert.Equal(t, TopicFilter{{scval: &scval}}, got) +} + +func topicFilterToString(t TopicFilter) string { + var s []string + for _, segment := range t { + if segment.wildcard != nil { + s = append(s, *segment.wildcard) + } else if segment.scval != nil { + out, err := xdr.MarshalBase64(*segment.scval) + if err != nil { + panic(err) + } + s = append(s, out) + } else { + panic("Invalid topic filter") + } + } + if len(s) == 0 { + s = append(s, "") + } + return strings.Join(s, "/") +} + +func TestGetEventsRequestValid(t *testing.T) { + // omit startLedger but include cursor + var request GetEventsRequest + assert.NoError(t, json.Unmarshal( + []byte("{ \"filters\": [], \"pagination\": { \"cursor\": \"0000000021474840576-0000000000\"} }"), + &request, + )) + assert.Equal(t, uint32(0), request.StartLedger) + assert.NoError(t, request.Valid(1000)) + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{}, + Pagination: &PaginationOptions{Cursor: &events.Cursor{}}, + }).Valid(1000), "startLedger and cursor cannot both be set") + + assert.NoError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{}, + Pagination: nil, + }).Valid(1000)) + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{}, + Pagination: &PaginationOptions{Limit: 1001}, + }).Valid(1000), "limit must not exceed 1000") + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 0, + Filters: []EventFilter{}, + Pagination: nil, + }).Valid(1000), "startLedger must be positive") + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {}, {}, {}, {}, {}, {}, + }, + Pagination: nil, + }).Valid(1000), "maximum 5 filters per request") + + assert.EqualError(t, 
(&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {EventType: map[string]interface{}{"foo": nil}}, + }, + Pagination: nil, + }).Valid(1000), "filter 1 invalid: filter type invalid: if set, type must be either 'system', 'contract' or 'diagnostic'") + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {ContractIDs: []string{ + "CCVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKUD2U", + "CC53XO53XO53XO53XO53XO53XO53XO53XO53XO53XO53XO53XO53WQD5", + "CDGMZTGMZTGMZTGMZTGMZTGMZTGMZTGMZTGMZTGMZTGMZTGMZTGMZLND", + "CDO53XO53XO53XO53XO53XO53XO53XO53XO53XO53XO53XO53XO53YUK", + "CDXO53XO53XO53XO53XO53XO53XO53XO53XO53XO53XO53XO53XO4M7R", + "CD7777777777777777777777777777777777777777777777777767GY", + }}, + }, + Pagination: nil, + }).Valid(1000), "filter 1 invalid: maximum 5 contract IDs per filter") + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {ContractIDs: []string{"a"}}, + }, + Pagination: nil, + }).Valid(1000), "filter 1 invalid: contract ID 1 invalid") + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {ContractIDs: []string{"CCVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVKVINVALID"}}, + }, + Pagination: nil, + }).Valid(1000), "filter 1 invalid: contract ID 1 invalid") + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + { + Topics: []TopicFilter{ + {}, {}, {}, {}, {}, {}, + }, + }, + }, + Pagination: nil, + }).Valid(1000), "filter 1 invalid: maximum 5 topics per filter") + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {Topics: []TopicFilter{ + {}, + }}, + }, + Pagination: nil, + }).Valid(1000), "filter 1 invalid: topic 1 invalid: topic must have at least one segment") + + assert.EqualError(t, (&GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {Topics: []TopicFilter{ + { + {}, + {}, + {}, + {}, + {}, + }, + }}, + }, + Pagination: nil, + }).Valid(1000), "filter 1 invalid: topic 1 invalid: topic cannot have more than 4 segments") +} + +func TestGetEvents(t *testing.T) { + now := time.Now().UTC() + counter := xdr.ScSymbol("COUNTER") + counterScVal := xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter} + counterXdr, err := xdr.MarshalBase64(counterScVal) + assert.NoError(t, err) + + t.Run("empty", func(t *testing.T) { + store := events.NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 100) + handler := eventsRPCHandler{ + scanner: store, + maxLimit: 10000, + defaultLimit: 100, + } + _, err = handler.getEvents(GetEventsRequest{ + StartLedger: 1, + }) + assert.EqualError(t, err, "[-32600] event store is empty") + }) + + t.Run("startLedger validation", func(t *testing.T) { + contractID := xdr.Hash([32]byte{}) + store := events.NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 100) + var txMeta []xdr.TransactionMeta + txMeta = append(txMeta, transactionMetaWithEvents( + contractEvent( + contractID, + xdr.ScVec{xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &counter, + }}, + xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &counter, + }, + ), + )) + assert.NoError(t, store.IngestEvents(ledgerCloseMetaWithEvents(2, now.Unix(), txMeta...))) + + handler := eventsRPCHandler{ + scanner: store, + maxLimit: 10000, + defaultLimit: 100, + } + _, err = handler.getEvents(GetEventsRequest{ + StartLedger: 1, + }) + assert.EqualError(t, err, "[-32600] start is before oldest ledger") + + _, err = handler.getEvents(GetEventsRequest{ + StartLedger: 3, + }) + 
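+ // the store above only ingested ledger 2, so a start ledger of 3 is past the newest one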
assert.EqualError(t, err, "[-32600] start is after newest ledger") + }) + + t.Run("no filtering returns all", func(t *testing.T) { + contractID := xdr.Hash([32]byte{}) + store := events.NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 100) + var txMeta []xdr.TransactionMeta + for i := 0; i < 10; i++ { + txMeta = append(txMeta, transactionMetaWithEvents( + contractEvent( + contractID, + xdr.ScVec{xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &counter, + }}, + xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &counter, + }, + ), + )) + } + assert.NoError(t, store.IngestEvents(ledgerCloseMetaWithEvents(1, now.Unix(), txMeta...))) + + handler := eventsRPCHandler{ + scanner: store, + maxLimit: 10000, + defaultLimit: 100, + } + results, err := handler.getEvents(GetEventsRequest{ + StartLedger: 1, + }) + assert.NoError(t, err) + + var expected []EventInfo + for i := range txMeta { + id := events.Cursor{ + Ledger: 1, + Tx: uint32(i + 1), + Op: 0, + Event: 0, + }.String() + value, err := xdr.MarshalBase64(xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &counter, + }) + assert.NoError(t, err) + expected = append(expected, EventInfo{ + EventType: EventTypeContract, + Ledger: 1, + LedgerClosedAt: now.Format(time.RFC3339), + ContractID: "CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABSC4", + ID: id, + PagingToken: id, + Topic: []string{value}, + Value: value, + InSuccessfulContractCall: true, + }) + } + assert.Equal(t, GetEventsResponse{expected, 1}, results) + }) + + t.Run("filtering by contract id", func(t *testing.T) { + store := events.NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 100) + var txMeta []xdr.TransactionMeta + contractIds := []xdr.Hash{ + xdr.Hash([32]byte{}), + xdr.Hash([32]byte{1}), + } + for i := 0; i < 5; i++ { + txMeta = append(txMeta, transactionMetaWithEvents( + contractEvent( + contractIds[i%len(contractIds)], + xdr.ScVec{xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &counter, + }}, + xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &counter, + }, + ), + )) + } + assert.NoError(t, store.IngestEvents(ledgerCloseMetaWithEvents(1, now.Unix(), txMeta...))) + + handler := eventsRPCHandler{ + scanner: store, + maxLimit: 10000, + defaultLimit: 100, + } + results, err := handler.getEvents(GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {ContractIDs: []string{strkey.MustEncode(strkey.VersionByteContract, contractIds[0][:])}}, + }, + }) + assert.NoError(t, err) + assert.Equal(t, int64(1), results.LatestLedger) + + expectedIds := []string{ + events.Cursor{Ledger: 1, Tx: 1, Op: 0, Event: 0}.String(), + events.Cursor{Ledger: 1, Tx: 3, Op: 0, Event: 0}.String(), + events.Cursor{Ledger: 1, Tx: 5, Op: 0, Event: 0}.String(), + } + eventIds := []string{} + for _, event := range results.Events { + eventIds = append(eventIds, event.ID) + } + assert.Equal(t, expectedIds, eventIds) + }) + + t.Run("filtering by topic", func(t *testing.T) { + store := events.NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 100) + var txMeta []xdr.TransactionMeta + contractID := xdr.Hash([32]byte{}) + for i := 0; i < 10; i++ { + number := xdr.Uint64(i) + txMeta = append(txMeta, transactionMetaWithEvents( + // Generate a unique topic like /counter/4 for each event so we can check + contractEvent( + contractID, + xdr.ScVec{ + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + }, + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + ), + )) + } + assert.NoError(t, 
store.IngestEvents(ledgerCloseMetaWithEvents(1, now.Unix(), txMeta...))) + + number := xdr.Uint64(4) + handler := eventsRPCHandler{ + scanner: store, + maxLimit: 10000, + defaultLimit: 100, + } + results, err := handler.getEvents(GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {Topics: []TopicFilter{ + []SegmentFilter{ + {scval: &xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}}, + {scval: &xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}}, + }, + }}, + }, + }) + assert.NoError(t, err) + + id := events.Cursor{Ledger: 1, Tx: 5, Op: 0, Event: 0}.String() + assert.NoError(t, err) + value, err := xdr.MarshalBase64(xdr.ScVal{ + Type: xdr.ScValTypeScvU64, + U64: &number, + }) + assert.NoError(t, err) + expected := []EventInfo{ + { + EventType: EventTypeContract, + Ledger: 1, + LedgerClosedAt: now.Format(time.RFC3339), + ContractID: "CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABSC4", + ID: id, + PagingToken: id, + Topic: []string{counterXdr, value}, + Value: value, + InSuccessfulContractCall: true, + }, + } + assert.Equal(t, GetEventsResponse{expected, 1}, results) + }) + + t.Run("filtering by both contract id and topic", func(t *testing.T) { + store := events.NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 100) + contractID := xdr.Hash([32]byte{}) + otherContractID := xdr.Hash([32]byte{1}) + number := xdr.Uint64(1) + txMeta := []xdr.TransactionMeta{ + // This matches neither the contract id nor the topic + transactionMetaWithEvents( + contractEvent( + otherContractID, + xdr.ScVec{ + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + }, + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + ), + ), + // This matches the contract id but not the topic + transactionMetaWithEvents( + contractEvent( + contractID, + xdr.ScVec{ + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + }, + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + ), + ), + // This matches the topic but not the contract id + transactionMetaWithEvents( + contractEvent( + otherContractID, + xdr.ScVec{ + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + }, + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + ), + ), + // This matches both the contract id and the topic + transactionMetaWithEvents( + contractEvent( + contractID, + xdr.ScVec{ + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + }, + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + ), + ), + } + assert.NoError(t, store.IngestEvents(ledgerCloseMetaWithEvents(1, now.Unix(), txMeta...))) + + handler := eventsRPCHandler{ + scanner: store, + maxLimit: 10000, + defaultLimit: 100, + } + results, err := handler.getEvents(GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + { + ContractIDs: []string{strkey.MustEncode(strkey.VersionByteContract, contractID[:])}, + Topics: []TopicFilter{ + []SegmentFilter{ + {scval: &xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}}, + {scval: &xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}}, + }, + }, + }, + }, + }) + assert.NoError(t, err) + + id := events.Cursor{Ledger: 1, Tx: 4, Op: 0, Event: 0}.String() + value, err := xdr.MarshalBase64(xdr.ScVal{ + Type: xdr.ScValTypeScvU64, + U64: &number, + }) + assert.NoError(t, err) + expected := []EventInfo{ + { + EventType: EventTypeContract, + Ledger: 1, + LedgerClosedAt: now.Format(time.RFC3339), + ContractID: strkey.MustEncode(strkey.VersionByteContract, contractID[:]), + ID: id, + PagingToken: 
id, + Topic: []string{counterXdr, value}, + Value: value, + InSuccessfulContractCall: true, + }, + } + assert.Equal(t, GetEventsResponse{expected, 1}, results) + }) + + t.Run("filtering by event type", func(t *testing.T) { + store := events.NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 100) + contractID := xdr.Hash([32]byte{}) + txMeta := []xdr.TransactionMeta{ + transactionMetaWithEvents( + contractEvent( + contractID, + xdr.ScVec{ + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + }, + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + ), + systemEvent( + contractID, + xdr.ScVec{ + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + }, + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + ), + diagnosticEvent( + contractID, + xdr.ScVec{ + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + }, + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &counter}, + ), + ), + } + assert.NoError(t, store.IngestEvents(ledgerCloseMetaWithEvents(1, now.Unix(), txMeta...))) + + handler := eventsRPCHandler{ + scanner: store, + maxLimit: 10000, + defaultLimit: 100, + } + results, err := handler.getEvents(GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{ + {EventType: map[string]interface{}{EventTypeSystem: nil}}, + }, + }) + assert.NoError(t, err) + + id := events.Cursor{Ledger: 1, Tx: 1, Op: 0, Event: 1}.String() + expected := []EventInfo{ + { + EventType: EventTypeSystem, + Ledger: 1, + LedgerClosedAt: now.Format(time.RFC3339), + ContractID: strkey.MustEncode(strkey.VersionByteContract, contractID[:]), + ID: id, + PagingToken: id, + Topic: []string{counterXdr}, + Value: counterXdr, + InSuccessfulContractCall: true, + }, + } + assert.Equal(t, GetEventsResponse{expected, 1}, results) + }) + + t.Run("with limit", func(t *testing.T) { + store := events.NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 100) + contractID := xdr.Hash([32]byte{}) + var txMeta []xdr.TransactionMeta + for i := 0; i < 180; i++ { + number := xdr.Uint64(i) + txMeta = append(txMeta, transactionMetaWithEvents( + contractEvent( + contractID, + xdr.ScVec{ + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + }, + xdr.ScVal{Type: xdr.ScValTypeScvU64, U64: &number}, + ), + )) + } + assert.NoError(t, store.IngestEvents(ledgerCloseMetaWithEvents(1, now.Unix(), txMeta...))) + + handler := eventsRPCHandler{ + scanner: store, + maxLimit: 10000, + defaultLimit: 100, + } + results, err := handler.getEvents(GetEventsRequest{ + StartLedger: 1, + Filters: []EventFilter{}, + Pagination: &PaginationOptions{Limit: 10}, + }) + assert.NoError(t, err) + + var expected []EventInfo + for i := 0; i < 10; i++ { + id := events.Cursor{ + Ledger: 1, + Tx: uint32(i + 1), + Op: 0, + Event: 0, + }.String() + value, err := xdr.MarshalBase64(txMeta[i].MustV3().SorobanMeta.Events[0].Body.MustV0().Data) + assert.NoError(t, err) + expected = append(expected, EventInfo{ + EventType: EventTypeContract, + Ledger: 1, + LedgerClosedAt: now.Format(time.RFC3339), + ContractID: "CAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABSC4", + ID: id, + PagingToken: id, + Topic: []string{value}, + Value: value, + InSuccessfulContractCall: true, + }) + } + assert.Equal(t, GetEventsResponse{expected, 1}, results) + }) + + t.Run("with cursor", func(t *testing.T) { + store := events.NewMemoryStore(interfaces.MakeNoOpDeamon(), "unit-tests", 100) + contractID := xdr.Hash([32]byte{}) + datas := []xdr.ScSymbol{ + // ledger/transaction/operation/event + xdr.ScSymbol("5/1/0/0"), + xdr.ScSymbol("5/1/0/1"), + 
xdr.ScSymbol("5/2/0/0"), + xdr.ScSymbol("5/2/0/1"), + } + txMeta := []xdr.TransactionMeta{ + transactionMetaWithEvents( + contractEvent( + contractID, + xdr.ScVec{ + counterScVal, + }, + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &datas[0]}, + ), + contractEvent( + contractID, + xdr.ScVec{ + counterScVal, + }, + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &datas[1]}, + ), + ), + transactionMetaWithEvents( + contractEvent( + contractID, + xdr.ScVec{ + counterScVal, + }, + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &datas[2]}, + ), + contractEvent( + contractID, + xdr.ScVec{ + counterScVal, + }, + xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &datas[3]}, + ), + ), + } + assert.NoError(t, store.IngestEvents(ledgerCloseMetaWithEvents(5, now.Unix(), txMeta...))) + + id := &events.Cursor{Ledger: 5, Tx: 1, Op: 0, Event: 0} + handler := eventsRPCHandler{ + scanner: store, + maxLimit: 10000, + defaultLimit: 100, + } + results, err := handler.getEvents(GetEventsRequest{ + Pagination: &PaginationOptions{ + Cursor: id, + Limit: 2, + }, + }) + assert.NoError(t, err) + + var expected []EventInfo + expectedIDs := []string{ + events.Cursor{Ledger: 5, Tx: 1, Op: 0, Event: 1}.String(), + events.Cursor{Ledger: 5, Tx: 2, Op: 0, Event: 0}.String(), + } + symbols := datas[1:3] + for i, id := range expectedIDs { + expectedXdr, err := xdr.MarshalBase64(xdr.ScVal{Type: xdr.ScValTypeScvSymbol, Sym: &symbols[i]}) + assert.NoError(t, err) + expected = append(expected, EventInfo{ + EventType: EventTypeContract, + Ledger: 5, + LedgerClosedAt: now.Format(time.RFC3339), + ContractID: strkey.MustEncode(strkey.VersionByteContract, contractID[:]), + ID: id, + PagingToken: id, + Topic: []string{counterXdr}, + Value: expectedXdr, + InSuccessfulContractCall: true, + }) + } + assert.Equal(t, GetEventsResponse{expected, 5}, results) + + results, err = handler.getEvents(GetEventsRequest{ + Pagination: &PaginationOptions{ + Cursor: &events.Cursor{Ledger: 5, Tx: 2, Op: 0, Event: 1}, + Limit: 2, + }, + }) + assert.NoError(t, err) + assert.Equal(t, GetEventsResponse{[]EventInfo{}, 5}, results) + }) +} + +func ledgerCloseMetaWithEvents(sequence uint32, closeTimestamp int64, txMeta ...xdr.TransactionMeta) xdr.LedgerCloseMeta { + var txProcessing []xdr.TransactionResultMeta + var phases []xdr.TransactionPhase + + for _, item := range txMeta { + var operations []xdr.Operation + for range item.MustV3().SorobanMeta.Events { + operations = append(operations, + xdr.Operation{ + Body: xdr.OperationBody{ + Type: xdr.OperationTypeInvokeHostFunction, + InvokeHostFunctionOp: &xdr.InvokeHostFunctionOp{ + HostFunction: xdr.HostFunction{ + Type: xdr.HostFunctionTypeHostFunctionTypeInvokeContract, + InvokeContract: &xdr.InvokeContractArgs{ + ContractAddress: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &xdr.Hash{0x1, 0x2}, + }, + FunctionName: "foo", + Args: nil, + }, + }, + Auth: []xdr.SorobanAuthorizationEntry{}, + }, + }, + }) + } + envelope := xdr.TransactionEnvelope{ + Type: xdr.EnvelopeTypeEnvelopeTypeTx, + V1: &xdr.TransactionV1Envelope{ + Tx: xdr.Transaction{ + SourceAccount: xdr.MustMuxedAddress(keypair.MustRandom().Address()), + Operations: operations, + }, + }, + } + txHash, err := network.HashTransactionInEnvelope(envelope, "unit-tests") + if err != nil { + panic(err) + } + + txProcessing = append(txProcessing, xdr.TransactionResultMeta{ + TxApplyProcessing: item, + Result: xdr.TransactionResultPair{ + TransactionHash: txHash, + }, + }) + components := []xdr.TxSetComponent{ + { + Type: 
xdr.TxSetComponentTypeTxsetCompTxsMaybeDiscountedFee, + TxsMaybeDiscountedFee: &xdr.TxSetComponentTxsMaybeDiscountedFee{ + Txs: []xdr.TransactionEnvelope{ + envelope, + }, + }, + }, + } + phases = append(phases, xdr.TransactionPhase{ + V: 0, + V0Components: &components, + }) + } + + return xdr.LedgerCloseMeta{ + V: 1, + V1: &xdr.LedgerCloseMetaV1{ + LedgerHeader: xdr.LedgerHeaderHistoryEntry{ + Hash: xdr.Hash{}, + Header: xdr.LedgerHeader{ + ScpValue: xdr.StellarValue{ + CloseTime: xdr.TimePoint(closeTimestamp), + }, + LedgerSeq: xdr.Uint32(sequence), + }, + }, + TxSet: xdr.GeneralizedTransactionSet{ + V: 1, + V1TxSet: &xdr.TransactionSetV1{ + PreviousLedgerHash: xdr.Hash{}, + Phases: phases, + }, + }, + TxProcessing: txProcessing, + }, + } +} + +func transactionMetaWithEvents(events ...xdr.ContractEvent) xdr.TransactionMeta { + return xdr.TransactionMeta{ + V: 3, + Operations: &[]xdr.OperationMeta{}, + V3: &xdr.TransactionMetaV3{ + SorobanMeta: &xdr.SorobanTransactionMeta{ + Events: events, + }, + }, + } +} + +func contractEvent(contractID xdr.Hash, topic []xdr.ScVal, body xdr.ScVal) xdr.ContractEvent { + return xdr.ContractEvent{ + ContractId: &contractID, + Type: xdr.ContractEventTypeContract, + Body: xdr.ContractEventBody{ + V: 0, + V0: &xdr.ContractEventV0{ + Topics: topic, + Data: body, + }, + }, + } +} + +func systemEvent(contractID xdr.Hash, topic []xdr.ScVal, body xdr.ScVal) xdr.ContractEvent { + return xdr.ContractEvent{ + ContractId: &contractID, + Type: xdr.ContractEventTypeSystem, + Body: xdr.ContractEventBody{ + V: 0, + V0: &xdr.ContractEventV0{ + Topics: topic, + Data: body, + }, + }, + } +} + +func diagnosticEvent(contractID xdr.Hash, topic []xdr.ScVal, body xdr.ScVal) xdr.ContractEvent { + return xdr.ContractEvent{ + ContractId: &contractID, + Type: xdr.ContractEventTypeDiagnostic, + Body: xdr.ContractEventBody{ + V: 0, + V0: &xdr.ContractEventV0{ + Topics: topic, + Data: body, + }, + }, + } +} diff --git a/cmd/soroban-rpc/internal/methods/get_latest_ledger.go b/cmd/soroban-rpc/internal/methods/get_latest_ledger.go new file mode 100644 index 00000000..11bd997a --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/get_latest_ledger.go @@ -0,0 +1,58 @@ +package methods + +import ( + "context" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" +) + +type GetLatestLedgerResponse struct { + // Hash of the latest ledger as a hex-encoded string + Hash string `json:"id"` + // Stellar Core protocol version associated with the ledger. + ProtocolVersion uint32 `json:"protocolVersion"` + // Sequence number of the latest ledger. + Sequence uint32 `json:"sequence"` +} + +// NewGetLatestLedgerHandler returns a JSON RPC handler to retrieve the latest ledger entry from Stellar core. 
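+// +// The response serializes as, for example (field values are illustrative only): +// +// {"id": "<hex-encoded ledger hash>", "protocolVersion": 20, "sequence": 960}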
+func NewGetLatestLedgerHandler(ledgerEntryReader db.LedgerEntryReader, ledgerReader db.LedgerReader) jrpc2.Handler { + return handler.New(func(ctx context.Context) (GetLatestLedgerResponse, error) { + tx, err := ledgerEntryReader.NewTx(ctx) + if err != nil { + return GetLatestLedgerResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not create read transaction", + } + } + defer func() { + _ = tx.Done() + }() + + latestSequence, err := tx.GetLatestLedgerSequence() + if err != nil { + return GetLatestLedgerResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not get latest ledger sequence", + } + } + + latestLedger, found, err := ledgerReader.GetLedger(ctx, latestSequence) + if (err != nil) || (!found) { + return GetLatestLedgerResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not get latest ledger", + } + } + + response := GetLatestLedgerResponse{ + Hash: latestLedger.LedgerHash().HexString(), + ProtocolVersion: latestLedger.ProtocolVersion(), + Sequence: latestSequence, + } + return response, nil + }) +} diff --git a/cmd/soroban-rpc/internal/methods/get_latest_ledger_test.go b/cmd/soroban-rpc/internal/methods/get_latest_ledger_test.go new file mode 100644 index 00000000..474b3b8d --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/get_latest_ledger_test.go @@ -0,0 +1,87 @@ +package methods + +import ( + "context" + "testing" + + "github.com/creachadair/jrpc2" + "github.com/stellar/go/xdr" + "github.com/stretchr/testify/assert" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" +) + +const ( + expectedLatestLedgerSequence uint32 = 960 + expectedLatestLedgerProtocolVersion uint32 = 20 + expectedLatestLedgerHashBytes byte = 42 +) + +type ConstantLedgerEntryReader struct { +} + +type ConstantLedgerEntryReaderTx struct { +} + +type ConstantLedgerReader struct { +} + +func (entryReader *ConstantLedgerEntryReader) GetLatestLedgerSequence(ctx context.Context) (uint32, error) { + return expectedLatestLedgerSequence, nil +} + +func (entryReader *ConstantLedgerEntryReader) NewTx(ctx context.Context) (db.LedgerEntryReadTx, error) { + return ConstantLedgerEntryReaderTx{}, nil +} + +func (entryReader *ConstantLedgerEntryReader) NewCachedTx(ctx context.Context) (db.LedgerEntryReadTx, error) { + return ConstantLedgerEntryReaderTx{}, nil +} + +func (entryReaderTx ConstantLedgerEntryReaderTx) GetLatestLedgerSequence() (uint32, error) { + return expectedLatestLedgerSequence, nil +} + +func (entryReaderTx ConstantLedgerEntryReaderTx) GetLedgerEntries(keys ...xdr.LedgerKey) ([]db.LedgerKeyAndEntry, error) { + return nil, nil +} + +func (entryReaderTx ConstantLedgerEntryReaderTx) Done() error { + return nil +} + +func (ledgerReader *ConstantLedgerReader) GetLedger(ctx context.Context, sequence uint32) (xdr.LedgerCloseMeta, bool, error) { + return createLedger(sequence, expectedLatestLedgerProtocolVersion, expectedLatestLedgerHashBytes), true, nil +} + +func (ledgerReader *ConstantLedgerReader) StreamAllLedgers(ctx context.Context, f db.StreamLedgerFn) error { + return nil +} + +func createLedger(ledgerSequence uint32, protocolVersion uint32, hash byte) xdr.LedgerCloseMeta { + return xdr.LedgerCloseMeta{ + V: 1, + V1: &xdr.LedgerCloseMetaV1{ + LedgerHeader: xdr.LedgerHeaderHistoryEntry{ + Hash: xdr.Hash{hash}, + Header: xdr.LedgerHeader{ + LedgerSeq: xdr.Uint32(ledgerSequence), + LedgerVersion: xdr.Uint32(protocolVersion), + }, + }, + }, + } +} + +func TestGetLatestLedger(t *testing.T) { + getLatestLedgerHandler := 
NewGetLatestLedgerHandler(&ConstantLedgerEntryReader{}, &ConstantLedgerReader{}) + latestLedgerRespI, err := getLatestLedgerHandler(context.Background(), &jrpc2.Request{}) + latestLedgerResp := latestLedgerRespI.(GetLatestLedgerResponse) + assert.NoError(t, err) + + expectedLatestLedgerHashStr := xdr.Hash{expectedLatestLedgerHashBytes}.HexString() + assert.Equal(t, expectedLatestLedgerHashStr, latestLedgerResp.Hash) + + assert.Equal(t, expectedLatestLedgerProtocolVersion, latestLedgerResp.ProtocolVersion) + assert.Equal(t, expectedLatestLedgerSequence, latestLedgerResp.Sequence) +} diff --git a/cmd/soroban-rpc/internal/methods/get_ledger_entries.go b/cmd/soroban-rpc/internal/methods/get_ledger_entries.go new file mode 100644 index 00000000..4063858c --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/get_ledger_entries.go @@ -0,0 +1,137 @@ +package methods + +import ( + "context" + "fmt" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" +) + +var ErrLedgerTtlEntriesCannotBeQueriedDirectly = "ledger ttl entries cannot be queried directly" + +type GetLedgerEntriesRequest struct { + Keys []string `json:"keys"` +} + +type LedgerEntryResult struct { + // Original request key matching this LedgerEntryResult. + Key string `json:"key"` + // Ledger entry data encoded in base 64. + XDR string `json:"xdr"` + // Last modified ledger for this entry. + LastModifiedLedger uint32 `json:"lastModifiedLedgerSeq"` + // The ledger sequence until the entry is live, available for entries that have associated ttl ledger entries. + LiveUntilLedgerSeq *uint32 `json:"liveUntilLedgerSeq,omitempty"` +} + +type GetLedgerEntriesResponse struct { + // All found ledger entries. + Entries []LedgerEntryResult `json:"entries"` + // Sequence number of the latest ledger at time of request. + LatestLedger uint32 `json:"latestLedger"` +} + +const getLedgerEntriesMaxKeys = 200 + +// NewGetLedgerEntriesHandler returns a JSON RPC handler to retrieve the specified ledger entries from Stellar Core. +func NewGetLedgerEntriesHandler(logger *log.Entry, ledgerEntryReader db.LedgerEntryReader) jrpc2.Handler { + return handler.New(func(ctx context.Context, request GetLedgerEntriesRequest) (GetLedgerEntriesResponse, error) { + if len(request.Keys) > getLedgerEntriesMaxKeys { + return GetLedgerEntriesResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: fmt.Sprintf("key count (%d) exceeds maximum supported (%d)", len(request.Keys), getLedgerEntriesMaxKeys), + } + } + var ledgerKeys []xdr.LedgerKey + for i, requestKey := range request.Keys { + var ledgerKey xdr.LedgerKey + if err := xdr.SafeUnmarshalBase64(requestKey, &ledgerKey); err != nil { + logger.WithError(err).WithField("request", request). + Infof("could not unmarshal requestKey %s at index %d from getLedgerEntries request", requestKey, i) + return GetLedgerEntriesResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: fmt.Sprintf("cannot unmarshal key value %s at index %d", requestKey, i), + } + } + if ledgerKey.Type == xdr.LedgerEntryTypeTtl { + logger.WithField("request", request). 
+ Infof("could not provide ledger ttl entry %s at index %d from getLedgerEntries request", requestKey, i) + return GetLedgerEntriesResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: ErrLedgerTtlEntriesCannotBeQueriedDirectly, + } + } + ledgerKeys = append(ledgerKeys, ledgerKey) + } + + tx, err := ledgerEntryReader.NewTx(ctx) + if err != nil { + return GetLedgerEntriesResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not create read transaction", + } + } + defer func() { + _ = tx.Done() + }() + + latestLedger, err := tx.GetLatestLedgerSequence() + if err != nil { + return GetLedgerEntriesResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not get latest ledger", + } + } + + ledgerEntryResults := make([]LedgerEntryResult, 0, len(ledgerKeys)) + ledgerKeysAndEntries, err := tx.GetLedgerEntries(ledgerKeys...) + if err != nil { + logger.WithError(err).WithField("request", request). + Info("could not obtain ledger entries from storage") + return GetLedgerEntriesResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not obtain ledger entries from storage", + } + } + + for _, ledgerKeyAndEntry := range ledgerKeysAndEntries { + keyXDR, err := xdr.MarshalBase64(ledgerKeyAndEntry.Key) + if err != nil { + logger.WithError(err).WithField("request", request). + Infof("could not serialize ledger key %v", ledgerKeyAndEntry.Key) + return GetLedgerEntriesResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: fmt.Sprintf("could not serialize ledger key %v", ledgerKeyAndEntry.Key), + } + } + + entryXDR, err := xdr.MarshalBase64(ledgerKeyAndEntry.Entry.Data) + if err != nil { + logger.WithError(err).WithField("request", request). + Infof("could not serialize ledger entry data for ledger entry %v", ledgerKeyAndEntry.Entry) + return GetLedgerEntriesResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: fmt.Sprintf("could not serialize ledger entry data for ledger entry %v", ledgerKeyAndEntry.Entry), + } + } + + ledgerEntryResults = append(ledgerEntryResults, LedgerEntryResult{ + Key: keyXDR, + XDR: entryXDR, + LastModifiedLedger: uint32(ledgerKeyAndEntry.Entry.LastModifiedLedgerSeq), + LiveUntilLedgerSeq: ledgerKeyAndEntry.LiveUntilLedgerSeq, + }) + } + + response := GetLedgerEntriesResponse{ + Entries: ledgerEntryResults, + LatestLedger: uint32(latestLedger), + } + return response, nil + }) +} diff --git a/cmd/soroban-rpc/internal/methods/get_ledger_entry.go b/cmd/soroban-rpc/internal/methods/get_ledger_entry.go new file mode 100644 index 00000000..b78d1099 --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/get_ledger_entry.go @@ -0,0 +1,106 @@ +package methods + +import ( + "context" + "fmt" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + + "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" +) + +// Deprecated. Use GetLedgerEntriesRequest instead. +// TODO(https://github.com/stellar/soroban-tools/issues/374) remove after getLedgerEntries is deployed. +type GetLedgerEntryRequest struct { + Key string `json:"key"` +} + +// Deprecated. Use GetLedgerEntriesResponse instead. +// TODO(https://github.com/stellar/soroban-tools/issues/374) remove after getLedgerEntries is deployed. 
+type GetLedgerEntryResponse struct { + XDR string `json:"xdr"` + LastModifiedLedger uint32 `json:"lastModifiedLedgerSeq"` + LatestLedger uint32 `json:"latestLedger"` + // The ledger sequence until the entry is live, available for entries that have associated ttl ledger entries. + LiveUntilLedgerSeq *uint32 `json:"LiveUntilLedgerSeq,omitempty"` +} + +// NewGetLedgerEntryHandler returns a json rpc handler to retrieve the specified ledger entry from stellar core +// Deprecated. use NewGetLedgerEntriesHandler instead. +// TODO(https://github.com/stellar/soroban-tools/issues/374) remove after getLedgerEntries is deployed. +func NewGetLedgerEntryHandler(logger *log.Entry, ledgerEntryReader db.LedgerEntryReader) jrpc2.Handler { + return handler.New(func(ctx context.Context, request GetLedgerEntryRequest) (GetLedgerEntryResponse, error) { + var key xdr.LedgerKey + if err := xdr.SafeUnmarshalBase64(request.Key, &key); err != nil { + logger.WithError(err).WithField("request", request). + Info("could not unmarshal ledgerKey from getLedgerEntry request") + return GetLedgerEntryResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: "cannot unmarshal key value", + } + } + + if key.Type == xdr.LedgerEntryTypeTtl { + return GetLedgerEntryResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: ErrLedgerTtlEntriesCannotBeQueriedDirectly, + } + } + + tx, err := ledgerEntryReader.NewTx(ctx) + if err != nil { + return GetLedgerEntryResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not create read transaction", + } + } + defer func() { + _ = tx.Done() + }() + + latestLedger, err := tx.GetLatestLedgerSequence() + if err != nil { + return GetLedgerEntryResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not get latest ledger", + } + } + + present, ledgerEntry, liveUntilLedgerSeq, err := db.GetLedgerEntry(tx, key) + if err != nil { + logger.WithError(err).WithField("request", request). + Info("could not obtain ledger entry from storage") + return GetLedgerEntryResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not obtain ledger entry from storage", + } + } + + if !present { + return GetLedgerEntryResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidRequest, + Message: fmt.Sprintf("not found (at ledger %d)", latestLedger), + } + } + + response := GetLedgerEntryResponse{ + LastModifiedLedger: uint32(ledgerEntry.LastModifiedLedgerSeq), + LatestLedger: latestLedger, + LiveUntilLedgerSeq: liveUntilLedgerSeq, + } + if response.XDR, err = xdr.MarshalBase64(ledgerEntry.Data); err != nil { + logger.WithError(err).WithField("request", request). 
+ Info("could not serialize ledger entry data") + return GetLedgerEntryResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not serialize ledger entry data", + } + } + + return response, nil + }) +} diff --git a/cmd/soroban-rpc/internal/methods/get_network.go b/cmd/soroban-rpc/internal/methods/get_network.go new file mode 100644 index 00000000..be2e0305 --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/get_network.go @@ -0,0 +1,37 @@ +package methods + +import ( + "context" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" +) + +type GetNetworkRequest struct{} + +type GetNetworkResponse struct { + FriendbotURL string `json:"friendbotUrl,omitempty"` + Passphrase string `json:"passphrase"` + ProtocolVersion int `json:"protocolVersion"` +} + +// NewGetNetworkHandler returns a json rpc handler to for the getNetwork method +func NewGetNetworkHandler(daemon interfaces.Daemon, networkPassphrase, friendbotURL string) jrpc2.Handler { + coreClient := daemon.CoreClient() + return handler.New(func(ctx context.Context, request GetNetworkRequest) (GetNetworkResponse, error) { + info, err := coreClient.Info(ctx) + if err != nil { + return GetNetworkResponse{}, (&jrpc2.Error{ + Code: jrpc2.InternalError, + Message: err.Error(), + }) + } + return GetNetworkResponse{ + FriendbotURL: friendbotURL, + Passphrase: networkPassphrase, + ProtocolVersion: info.Info.ProtocolVersion, + }, nil + }) +} diff --git a/cmd/soroban-rpc/internal/methods/get_transaction.go b/cmd/soroban-rpc/internal/methods/get_transaction.go new file mode 100644 index 00000000..7a2fe657 --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/get_transaction.go @@ -0,0 +1,120 @@ +package methods + +import ( + "context" + "encoding/base64" + "encoding/hex" + "fmt" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/transactions" +) + +const ( + // TransactionStatusSuccess indicates the transaction was included in the ledger and + // it was executed without errors. + TransactionStatusSuccess = "SUCCESS" + // TransactionStatusNotFound indicates the transaction was not found in Soroban-RPC's + // transaction store. + TransactionStatusNotFound = "NOT_FOUND" + // TransactionStatusFailed indicates the transaction was included in the ledger and + // it was executed with an error. + TransactionStatusFailed = "FAILED" +) + +// GetTransactionResponse is the response for the Soroban-RPC getTransaction() endpoint +type GetTransactionResponse struct { + // Status is one of: TransactionSuccess, TransactionNotFound, or TransactionFailed. + Status string `json:"status"` + // LatestLedger is the latest ledger stored in Soroban-RPC. + LatestLedger uint32 `json:"latestLedger"` + // LatestLedgerCloseTime is the unix timestamp of when the latest ledger was closed. + LatestLedgerCloseTime int64 `json:"latestLedgerCloseTime,string"` + // LatestLedger is the oldest ledger stored in Soroban-RPC. + OldestLedger uint32 `json:"oldestLedger"` + // LatestLedgerCloseTime is the unix timestamp of when the oldest ledger was closed. + OldestLedgerCloseTime int64 `json:"oldestLedgerCloseTime,string"` + + // The fields below are only present if Status is not TransactionNotFound. + + // ApplicationOrder is the index of the transaction among all the transactions + // for that ledger. 
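// Illustrative client sketch (not part of this diff): a caller would typically poll
// getTransaction by hash until the status leaves NOT_FOUND. The method name and URL are
// assumptions; the status strings and the "hash" request field come from the
// definitions in this file, and the hash value below is a placeholder.

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/creachadair/jrpc2"
	"github.com/creachadair/jrpc2/jhttp"
)

func main() {
	client := jrpc2.NewClient(jhttp.NewChannel("http://localhost:8000/", nil), nil)
	defer client.Close()

	txHashHex := "<hex-encoded 32-byte transaction hash>" // placeholder
	params := map[string]string{"hash": txHashHex}

	for attempt := 0; attempt < 10; attempt++ {
		var res struct {
			Status       string `json:"status"`
			LatestLedger uint32 `json:"latestLedger"`
		}
		if err := client.CallResult(context.Background(), "getTransaction", params, &res); err != nil {
			panic(err)
		}
		if res.Status != "NOT_FOUND" { // SUCCESS or FAILED once the transaction is ingested
			fmt.Println("final status:", res.Status, "latest ledger:", res.LatestLedger)
			return
		}
		time.Sleep(time.Second)
	}
	fmt.Println("transaction not found after 10 attempts")
}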
+ ApplicationOrder int32 `json:"applicationOrder,omitempty"` + // FeeBump indicates whether the transaction is a feebump transaction + FeeBump bool `json:"feeBump,omitempty"` + // EnvelopeXdr is the TransactionEnvelope XDR value. + EnvelopeXdr string `json:"envelopeXdr,omitempty"` + // ResultXdr is the TransactionResult XDR value. + ResultXdr string `json:"resultXdr,omitempty"` + // ResultMetaXdr is the TransactionMeta XDR value. + ResultMetaXdr string `json:"resultMetaXdr,omitempty"` + + // Ledger is the sequence of the ledger which included the transaction. + Ledger uint32 `json:"ledger,omitempty"` + // LedgerCloseTime is the unix timestamp of when the transaction was included in the ledger. + LedgerCloseTime int64 `json:"createdAt,string,omitempty"` +} + +type GetTransactionRequest struct { + Hash string `json:"hash"` +} + +type transactionGetter interface { + GetTransaction(hash xdr.Hash) (transactions.Transaction, bool, transactions.StoreRange) +} + +func GetTransaction(getter transactionGetter, request GetTransactionRequest) (GetTransactionResponse, error) { + // parse hash + if hex.DecodedLen(len(request.Hash)) != len(xdr.Hash{}) { + return GetTransactionResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: fmt.Sprintf("unexpected hash length (%d)", len(request.Hash)), + } + } + + var txHash xdr.Hash + _, err := hex.Decode(txHash[:], []byte(request.Hash)) + if err != nil { + return GetTransactionResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: fmt.Sprintf("incorrect hash: %v", err), + } + } + + tx, found, storeRange := getter.GetTransaction(txHash) + response := GetTransactionResponse{ + LatestLedger: storeRange.LastLedger.Sequence, + LatestLedgerCloseTime: storeRange.LastLedger.CloseTime, + OldestLedger: storeRange.FirstLedger.Sequence, + OldestLedgerCloseTime: storeRange.FirstLedger.CloseTime, + } + if !found { + response.Status = TransactionStatusNotFound + return response, nil + } + + response.ApplicationOrder = tx.ApplicationOrder + response.FeeBump = tx.FeeBump + response.Ledger = tx.Ledger.Sequence + response.LedgerCloseTime = tx.Ledger.CloseTime + + response.ResultXdr = base64.StdEncoding.EncodeToString(tx.Result) + response.EnvelopeXdr = base64.StdEncoding.EncodeToString(tx.Envelope) + response.ResultMetaXdr = base64.StdEncoding.EncodeToString(tx.Meta) + if tx.Successful { + response.Status = TransactionStatusSuccess + } else { + response.Status = TransactionStatusFailed + } + return response, nil +} + +// NewGetTransactionHandler returns a get transaction json rpc handler +func NewGetTransactionHandler(getter transactionGetter) jrpc2.Handler { + return handler.New(func(ctx context.Context, request GetTransactionRequest) (GetTransactionResponse, error) { + return GetTransaction(getter, request) + }) +} diff --git a/cmd/soroban-rpc/internal/methods/get_transaction_test.go b/cmd/soroban-rpc/internal/methods/get_transaction_test.go new file mode 100644 index 00000000..85847f00 --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/get_transaction_test.go @@ -0,0 +1,210 @@ +package methods + +import ( + "encoding/hex" + "testing" + + "github.com/stellar/go/xdr" + "github.com/stretchr/testify/require" + + "github.com/stellar/go/network" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/transactions" +) + +func txHash(acctSeq uint32) xdr.Hash { + envelope := txEnvelope(acctSeq) + hash, err := network.HashTransactionInEnvelope(envelope, "passphrase") + if err != 
nil { + panic(err) + } + + return hash +} + +func ledgerCloseTime(ledgerSequence uint32) int64 { + return int64(ledgerSequence)*25 + 100 +} + +func transactionResult(successful bool) xdr.TransactionResult { + code := xdr.TransactionResultCodeTxBadSeq + if successful { + code = xdr.TransactionResultCodeTxSuccess + } + opResults := []xdr.OperationResult{} + return xdr.TransactionResult{ + FeeCharged: 100, + Result: xdr.TransactionResultResult{ + Code: code, + Results: &opResults, + }, + } +} + +func txMeta(acctSeq uint32, successful bool) xdr.LedgerCloseMeta { + envelope := txEnvelope(acctSeq) + + txProcessing := []xdr.TransactionResultMeta{ + { + TxApplyProcessing: xdr.TransactionMeta{ + V: 3, + Operations: &[]xdr.OperationMeta{}, + V3: &xdr.TransactionMetaV3{}, + }, + Result: xdr.TransactionResultPair{ + TransactionHash: txHash(acctSeq), + Result: transactionResult(successful), + }, + }, + } + + components := []xdr.TxSetComponent{ + { + Type: xdr.TxSetComponentTypeTxsetCompTxsMaybeDiscountedFee, + TxsMaybeDiscountedFee: &xdr.TxSetComponentTxsMaybeDiscountedFee{ + BaseFee: nil, + Txs: []xdr.TransactionEnvelope{ + envelope, + }, + }, + }, + } + return xdr.LedgerCloseMeta{ + V: 1, + V1: &xdr.LedgerCloseMetaV1{ + LedgerHeader: xdr.LedgerHeaderHistoryEntry{ + Header: xdr.LedgerHeader{ + ScpValue: xdr.StellarValue{ + CloseTime: xdr.TimePoint(ledgerCloseTime(acctSeq + 100)), + }, + LedgerSeq: xdr.Uint32(acctSeq + 100), + }, + }, + TxProcessing: txProcessing, + TxSet: xdr.GeneralizedTransactionSet{ + V: 1, + V1TxSet: &xdr.TransactionSetV1{ + PreviousLedgerHash: xdr.Hash{1}, + Phases: []xdr.TransactionPhase{ + { + V: 0, + V0Components: &components, + }, + }, + }, + }, + }, + } +} + +func txEnvelope(acctSeq uint32) xdr.TransactionEnvelope { + envelope, err := xdr.NewTransactionEnvelope(xdr.EnvelopeTypeEnvelopeTypeTx, xdr.TransactionV1Envelope{ + Tx: xdr.Transaction{ + Fee: 1, + SeqNum: xdr.SequenceNumber(acctSeq), + SourceAccount: xdr.MustMuxedAddress("MA7QYNF7SOWQ3GLR2BGMZEHXAVIRZA4KVWLTJJFC7MGXUA74P7UJVAAAAAAAAAAAAAJLK"), + }, + }) + if err != nil { + panic(err) + } + return envelope +} + +func TestGetTransaction(t *testing.T) { + store := transactions.NewMemoryStore(interfaces.MakeNoOpDeamon(), "passphrase", 100) + _, err := GetTransaction(store, GetTransactionRequest{"ab"}) + require.EqualError(t, err, "[-32602] unexpected hash length (2)") + _, err = GetTransaction(store, GetTransactionRequest{"foo "}) + require.EqualError(t, err, "[-32602] incorrect hash: encoding/hex: invalid byte: U+006F 'o'") + + hash := "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + tx, err := GetTransaction(store, GetTransactionRequest{hash}) + require.NoError(t, err) + require.Equal(t, GetTransactionResponse{ + Status: TransactionStatusNotFound, + }, tx) + + meta := txMeta(1, true) + err = store.IngestTransactions(meta) + require.NoError(t, err) + + xdrHash := txHash(1) + hash = hex.EncodeToString(xdrHash[:]) + tx, err = GetTransaction(store, GetTransactionRequest{hash}) + require.NoError(t, err) + + expectedTxResult, err := xdr.MarshalBase64(meta.V1.TxProcessing[0].Result.Result) + require.NoError(t, err) + expectedEnvelope, err := xdr.MarshalBase64(txEnvelope(1)) + require.NoError(t, err) + expectedTxMeta, err := xdr.MarshalBase64(meta.V1.TxProcessing[0].TxApplyProcessing) + require.NoError(t, err) + require.Equal(t, GetTransactionResponse{ + Status: TransactionStatusSuccess, + LatestLedger: 101, + LatestLedgerCloseTime: 2625, + OldestLedger: 101, + OldestLedgerCloseTime: 2625, + ApplicationOrder: 
1, + FeeBump: false, + EnvelopeXdr: expectedEnvelope, + ResultXdr: expectedTxResult, + ResultMetaXdr: expectedTxMeta, + Ledger: 101, + LedgerCloseTime: 2625, + }, tx) + + // ingest another (failed) transaction + meta = txMeta(2, false) + err = store.IngestTransactions(meta) + require.NoError(t, err) + + // the first transaction should still be there + tx, err = GetTransaction(store, GetTransactionRequest{hash}) + require.NoError(t, err) + require.Equal(t, GetTransactionResponse{ + Status: TransactionStatusSuccess, + LatestLedger: 102, + LatestLedgerCloseTime: 2650, + OldestLedger: 101, + OldestLedgerCloseTime: 2625, + ApplicationOrder: 1, + FeeBump: false, + EnvelopeXdr: expectedEnvelope, + ResultXdr: expectedTxResult, + ResultMetaXdr: expectedTxMeta, + Ledger: 101, + LedgerCloseTime: 2625, + }, tx) + + // the new transaction should also be there + xdrHash = txHash(2) + hash = hex.EncodeToString(xdrHash[:]) + + expectedTxResult, err = xdr.MarshalBase64(meta.V1.TxProcessing[0].Result.Result) + require.NoError(t, err) + expectedEnvelope, err = xdr.MarshalBase64(txEnvelope(2)) + require.NoError(t, err) + expectedTxMeta, err = xdr.MarshalBase64(meta.V1.TxProcessing[0].TxApplyProcessing) + require.NoError(t, err) + + tx, err = GetTransaction(store, GetTransactionRequest{hash}) + require.NoError(t, err) + require.NoError(t, err) + require.Equal(t, GetTransactionResponse{ + Status: TransactionStatusFailed, + LatestLedger: 102, + LatestLedgerCloseTime: 2650, + OldestLedger: 101, + OldestLedgerCloseTime: 2625, + ApplicationOrder: 1, + FeeBump: false, + EnvelopeXdr: expectedEnvelope, + ResultXdr: expectedTxResult, + ResultMetaXdr: expectedTxMeta, + Ledger: 102, + LedgerCloseTime: 2650, + }, tx) +} diff --git a/cmd/soroban-rpc/internal/methods/health.go b/cmd/soroban-rpc/internal/methods/health.go new file mode 100644 index 00000000..ab51cc78 --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/health.go @@ -0,0 +1,40 @@ +package methods + +import ( + "context" + "fmt" + "time" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/transactions" +) + +type HealthCheckResult struct { + Status string `json:"status"` +} + +// NewHealthCheck returns a health check json rpc handler +func NewHealthCheck(txStore *transactions.MemoryStore, maxHealthyLedgerLatency time.Duration) jrpc2.Handler { + return handler.New(func(ctx context.Context) (HealthCheckResult, error) { + ledgerInfo := txStore.GetLatestLedger() + if ledgerInfo.Sequence < 1 { + return HealthCheckResult{}, jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "data stores are not initialized", + } + } + lastKnownLedgerCloseTime := time.Unix(ledgerInfo.CloseTime, 0) + lastKnownLedgerLatency := time.Since(lastKnownLedgerCloseTime) + if lastKnownLedgerLatency > maxHealthyLedgerLatency { + roundedLatency := lastKnownLedgerLatency.Round(time.Second) + msg := fmt.Sprintf("latency (%s) since last known ledger closed is too high (>%s)", roundedLatency, maxHealthyLedgerLatency) + return HealthCheckResult{}, jrpc2.Error{ + Code: jrpc2.InternalError, + Message: msg, + } + } + return HealthCheckResult{Status: "healthy"}, nil + }) +} diff --git a/cmd/soroban-rpc/internal/methods/send_transaction.go b/cmd/soroban-rpc/internal/methods/send_transaction.go new file mode 100644 index 00000000..c8a0ff84 --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/send_transaction.go @@ -0,0 +1,132 @@ +package methods + +import ( + "context" + "encoding/hex" + + 
"github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + "github.com/stellar/go/network" + proto "github.com/stellar/go/protocols/stellarcore" + "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/transactions" +) + +// SendTransactionResponse represents the transaction submission response returned Soroban-RPC +type SendTransactionResponse struct { + // ErrorResultXDR is present only if Status is equal to proto.TXStatusError. + // ErrorResultXDR is a TransactionResult xdr string which contains details on why + // the transaction could not be accepted by stellar-core. + ErrorResultXDR string `json:"errorResultXdr,omitempty"` + // DiagnosticEventsXDR is present only if Status is equal to proto.TXStatusError. + // DiagnosticEventsXDR is a base64-encoded slice of xdr.DiagnosticEvent + DiagnosticEventsXDR []string `json:"diagnosticEventsXdr,omitempty"` + // Status represents the status of the transaction submission returned by stellar-core. + // Status can be one of: proto.TXStatusPending, proto.TXStatusDuplicate, + // proto.TXStatusTryAgainLater, or proto.TXStatusError. + Status string `json:"status"` + // Hash is a hash of the transaction which can be used to look up whether + // the transaction was included in the ledger. + Hash string `json:"hash"` + // LatestLedger is the latest ledger known to Soroban-RPC at the time it handled + // the transaction submission request. + LatestLedger uint32 `json:"latestLedger"` + // LatestLedgerCloseTime is the unix timestamp of the close time of the latest ledger known to + // Soroban-RPC at the time it handled the transaction submission request. + LatestLedgerCloseTime int64 `json:"latestLedgerCloseTime,string"` +} + +// SendTransactionRequest is the Soroban-RPC request to submit a transaction. +type SendTransactionRequest struct { + // Transaction is the base64 encoded transaction envelope. + Transaction string `json:"transaction"` +} + +// LatestLedgerStore is a store which returns the latest ingested ledger. +type LatestLedgerStore interface { + // GetLatestLedger returns the latest ingested ledger. + GetLatestLedger() transactions.LedgerInfo +} + +// NewSendTransactionHandler returns a submit transaction json rpc handler +func NewSendTransactionHandler(daemon interfaces.Daemon, logger *log.Entry, store LatestLedgerStore, passphrase string) jrpc2.Handler { + submitter := daemon.CoreClient() + return handler.New(func(ctx context.Context, request SendTransactionRequest) (SendTransactionResponse, error) { + var envelope xdr.TransactionEnvelope + err := xdr.SafeUnmarshalBase64(request.Transaction, &envelope) + if err != nil { + return SendTransactionResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: "invalid_xdr", + } + } + + var hash [32]byte + hash, err = network.HashTransactionInEnvelope(envelope, passphrase) + if err != nil { + return SendTransactionResponse{}, &jrpc2.Error{ + Code: jrpc2.InvalidParams, + Message: "invalid_hash", + } + } + txHash := hex.EncodeToString(hash[:]) + + ledgerInfo := store.GetLatestLedger() + resp, err := submitter.SubmitTransaction(ctx, request.Transaction) + if err != nil { + logger.WithError(err). 
+ WithField("tx", request.Transaction).Error("could not submit transaction") + return SendTransactionResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not submit transaction to stellar-core", + } + } + + // interpret response + if resp.IsException() { + logger.WithField("exception", resp.Exception). + WithField("tx", request.Transaction).Error("received exception from stellar core") + return SendTransactionResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "received exception from stellar-core", + } + } + + switch resp.Status { + case proto.TXStatusError: + events, err := proto.DiagnosticEventsToSlice(resp.DiagnosticEvents) + if err != nil { + logger.WithField("tx", request.Transaction).Error("Cannot decode diagnostic events:", err) + return SendTransactionResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "could not decode diagnostic events", + } + } + return SendTransactionResponse{ + ErrorResultXDR: resp.Error, + DiagnosticEventsXDR: events, + Status: resp.Status, + Hash: txHash, + LatestLedger: ledgerInfo.Sequence, + LatestLedgerCloseTime: ledgerInfo.CloseTime, + }, nil + case proto.TXStatusPending, proto.TXStatusDuplicate, proto.TXStatusTryAgainLater: + return SendTransactionResponse{ + Status: resp.Status, + Hash: txHash, + LatestLedger: ledgerInfo.Sequence, + LatestLedgerCloseTime: ledgerInfo.CloseTime, + }, nil + default: + logger.WithField("status", resp.Status). + WithField("tx", request.Transaction).Error("Unrecognized stellar-core status response") + return SendTransactionResponse{}, &jrpc2.Error{ + Code: jrpc2.InternalError, + Message: "invalid status from stellar-core", + } + } + }) +} diff --git a/cmd/soroban-rpc/internal/methods/simulate_transaction.go b/cmd/soroban-rpc/internal/methods/simulate_transaction.go new file mode 100644 index 00000000..a278c9c2 --- /dev/null +++ b/cmd/soroban-rpc/internal/methods/simulate_transaction.go @@ -0,0 +1,189 @@ +package methods + +import ( + "context" + "encoding/base64" + "fmt" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/preflight" +) + +type SimulateTransactionRequest struct { + Transaction string `json:"transaction"` + ResourceConfig *preflight.ResourceConfig `json:"resourceConfig,omitempty"` +} + +type SimulateTransactionCost struct { + CPUInstructions uint64 `json:"cpuInsns,string"` + MemoryBytes uint64 `json:"memBytes,string"` +} + +// SimulateHostFunctionResult contains the simulation result of each HostFunction within the single InvokeHostFunctionOp allowed in a Transaction +type SimulateHostFunctionResult struct { + Auth []string `json:"auth"` + XDR string `json:"xdr"` +} + +type RestorePreamble struct { + TransactionData string `json:"transactionData"` // SorobanTransactionData XDR in base64 + MinResourceFee int64 `json:"minResourceFee,string"` +} + +type SimulateTransactionResponse struct { + Error string `json:"error,omitempty"` + TransactionData string `json:"transactionData,omitempty"` // SorobanTransactionData XDR in base64 + MinResourceFee int64 `json:"minResourceFee,string,omitempty"` + Events []string `json:"events,omitempty"` // DiagnosticEvent XDR in base64 + Results []SimulateHostFunctionResult `json:"results,omitempty"` // an array of the individual host function call results + Cost SimulateTransactionCost `json:"cost,omitempty"` // the 
effective cpu and memory cost of the invoked transaction execution. + RestorePreamble *RestorePreamble `json:"restorePreamble,omitempty"` // If present, it indicates that a prior RestoreFootprint is required + LatestLedger uint32 `json:"latestLedger"` +} + +type PreflightGetter interface { + GetPreflight(ctx context.Context, params preflight.PreflightGetterParameters) (preflight.Preflight, error) +} + +// NewSimulateTransactionHandler returns a json rpc handler to run preflight simulations +func NewSimulateTransactionHandler(logger *log.Entry, ledgerEntryReader db.LedgerEntryReader, ledgerReader db.LedgerReader, getter PreflightGetter) jrpc2.Handler { + + return handler.New(func(ctx context.Context, request SimulateTransactionRequest) SimulateTransactionResponse { + var txEnvelope xdr.TransactionEnvelope + if err := xdr.SafeUnmarshalBase64(request.Transaction, &txEnvelope); err != nil { + logger.WithError(err).WithField("request", request). + Info("could not unmarshal simulate transaction envelope") + return SimulateTransactionResponse{ + Error: "Could not unmarshal transaction", + } + } + if len(txEnvelope.Operations()) != 1 { + return SimulateTransactionResponse{ + Error: "Transaction contains more than one operation", + } + } + op := txEnvelope.Operations()[0] + + var sourceAccount xdr.AccountId + if opSourceAccount := op.SourceAccount; opSourceAccount != nil { + sourceAccount = opSourceAccount.ToAccountId() + } else { + sourceAccount = txEnvelope.SourceAccount().ToAccountId() + } + + footprint := xdr.LedgerFootprint{} + switch op.Body.Type { + case xdr.OperationTypeInvokeHostFunction: + case xdr.OperationTypeExtendFootprintTtl, xdr.OperationTypeRestoreFootprint: + if txEnvelope.Type != xdr.EnvelopeTypeEnvelopeTypeTx && txEnvelope.V1.Tx.Ext.V != 1 { + return SimulateTransactionResponse{ + Error: "To perform a SimulateTransaction for ExtendFootprintTtl or RestoreFootprint operations, SorobanTransactionData must be provided", + } + } + footprint = txEnvelope.V1.Tx.Ext.SorobanData.Resources.Footprint + default: + return SimulateTransactionResponse{ + Error: "Transaction contains unsupported operation type: " + op.Body.Type.String(), + } + } + + readTx, err := ledgerEntryReader.NewCachedTx(ctx) + if err != nil { + return SimulateTransactionResponse{ + Error: "Cannot create read transaction", + } + } + defer func() { + _ = readTx.Done() + }() + latestLedger, err := readTx.GetLatestLedgerSequence() + if err != nil { + return SimulateTransactionResponse{ + Error: err.Error(), + } + } + bucketListSize, err := getBucketListSize(ctx, ledgerReader, latestLedger) + if err != nil { + return SimulateTransactionResponse{ + Error: err.Error(), + } + } + + resource_config := preflight.DefaultResourceConfig() + if request.ResourceConfig != nil { + resource_config = *request.ResourceConfig + } + params := preflight.PreflightGetterParameters{ + LedgerEntryReadTx: readTx, + BucketListSize: bucketListSize, + SourceAccount: sourceAccount, + OperationBody: op.Body, + Footprint: footprint, + ResourceConfig: resource_config, + } + result, err := getter.GetPreflight(ctx, params) + if err != nil { + return SimulateTransactionResponse{ + Error: err.Error(), + LatestLedger: latestLedger, + } + } + + var results []SimulateHostFunctionResult + if len(result.Result) != 0 { + results = append(results, SimulateHostFunctionResult{ + XDR: base64.StdEncoding.EncodeToString(result.Result), + Auth: base64EncodeSlice(result.Auth), + }) + } + var restorePreamble *RestorePreamble = nil + if len(result.PreRestoreTransactionData) 
!= 0 { + restorePreamble = &RestorePreamble{ + TransactionData: base64.StdEncoding.EncodeToString(result.PreRestoreTransactionData), + MinResourceFee: result.PreRestoreMinFee, + } + } + + return SimulateTransactionResponse{ + Error: result.Error, + Results: results, + Events: base64EncodeSlice(result.Events), + TransactionData: base64.StdEncoding.EncodeToString(result.TransactionData), + MinResourceFee: result.MinFee, + Cost: SimulateTransactionCost{ + CPUInstructions: result.CPUInstructions, + MemoryBytes: result.MemoryBytes, + }, + LatestLedger: latestLedger, + RestorePreamble: restorePreamble, + } + }) +} + +func base64EncodeSlice(in [][]byte) []string { + result := make([]string, len(in)) + for i, v := range in { + result[i] = base64.StdEncoding.EncodeToString(v) + } + return result +} + +func getBucketListSize(ctx context.Context, ledgerReader db.LedgerReader, latestLedger uint32) (uint64, error) { + // obtain bucket size + var closeMeta, ok, err = ledgerReader.GetLedger(ctx, latestLedger) + if err != nil { + return 0, err + } + if !ok { + return 0, fmt.Errorf("missing meta for latest ledger (%d)", latestLedger) + } + if closeMeta.V != 1 { + return 0, fmt.Errorf("latest ledger (%d) meta has unexpected verion (%d)", latestLedger, closeMeta.V) + } + return uint64(closeMeta.V1.TotalByteSizeOfBucketList), nil +} diff --git a/cmd/soroban-rpc/internal/network/backlogQ.go b/cmd/soroban-rpc/internal/network/backlogQ.go new file mode 100644 index 00000000..6a8f691b --- /dev/null +++ b/cmd/soroban-rpc/internal/network/backlogQ.go @@ -0,0 +1,128 @@ +package network + +import ( + "context" + "net/http" + "sync/atomic" + + "github.com/creachadair/jrpc2" + "github.com/stellar/go/support/errors" + "github.com/stellar/go/support/log" +) + +const RequestBacklogQueueNoLimit = maxUint + +// The gauge is a subset of prometheus.Gauge, and it allows us to mock the +// gauge usage for testing purposes without requiring the implementation of the true +// prometheus.Gauge. +type gauge interface { + Inc() + Dec() +} + +type backlogQLimiter struct { + limit uint64 + pending uint64 + gauge gauge + limitReached uint64 + logger *log.Entry +} + +type backlogHTTPQLimiter struct { + httpDownstreamHandler http.Handler + backlogQLimiter +} + +func MakeHTTPBacklogQueueLimiter(downstream http.Handler, gauge gauge, limit uint64, logger *log.Entry) *backlogHTTPQLimiter { + return &backlogHTTPQLimiter{ + httpDownstreamHandler: downstream, + backlogQLimiter: backlogQLimiter{ + limit: limit, + gauge: gauge, + logger: logger, + }, + } +} + +type backlogJrpcQLimiter struct { + jrpcDownstreamHandler jrpc2.Handler + backlogQLimiter +} + +func MakeJrpcBacklogQueueLimiter(downstream jrpc2.Handler, gauge gauge, limit uint64, logger *log.Entry) *backlogJrpcQLimiter { + return &backlogJrpcQLimiter{ + jrpcDownstreamHandler: downstream, + backlogQLimiter: backlogQLimiter{ + limit: limit, + gauge: gauge, + logger: logger, + }, + } +} + +func (q *backlogHTTPQLimiter) ServeHTTP(res http.ResponseWriter, req *http.Request) { + if q.limit == RequestBacklogQueueNoLimit { + // if specified max duration, pass-through + q.httpDownstreamHandler.ServeHTTP(res, req) + return + } + if newPending := atomic.AddUint64(&q.pending, 1); newPending > q.limit { + // we've reached our queue limit - let the caller know we're too busy. + atomic.AddUint64(&q.pending, ^uint64(0)) + res.WriteHeader(http.StatusServiceUnavailable) + if atomic.CompareAndSwapUint64(&q.limitReached, 0, 1) { + // if the limit was reached, log a message. 
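// Illustrative wiring sketch (not part of this diff): capping the number of concurrently
// executing HTTP requests with the backlog limiter constructed above. The mux, listen
// address, and limit value are assumptions, and because the package is internal this
// code would have to live under cmd/soroban-rpc. A prometheus.Gauge satisfies the gauge
// parameter (it has Inc/Dec), and nil is also accepted for both the gauge and the logger
// since the limiter nil-checks them. MakeJrpcBacklogQueueLimiter wraps individual jrpc2
// handlers in the same fashion.

package main

import (
	"net/http"

	"github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/network"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/hello", func(w http.ResponseWriter, r *http.Request) {
		_, _ = w.Write([]byte("hello"))
	})

	// Allow at most 64 concurrently executing requests; once the backlog is full,
	// additional requests are answered with 503 Service Unavailable.
	limited := network.MakeHTTPBacklogQueueLimiter(mux, nil, 64, nil)

	_ = http.ListenAndServe("127.0.0.1:8080", limited)
}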
+ if q.logger != nil { + q.logger.Infof("Backlog queue limiter reached the queue limit of %d executing concurrent http requests.", q.limit) + } + } + return + } else { + if q.gauge != nil { + q.gauge.Inc() + } + } + defer func() { + + atomic.AddUint64(&q.pending, ^uint64(0)) + if q.gauge != nil { + q.gauge.Dec() + } + atomic.StoreUint64(&q.limitReached, 0) + }() + + q.httpDownstreamHandler.ServeHTTP(res, req) +} + +func (q *backlogJrpcQLimiter) Handle(ctx context.Context, req *jrpc2.Request) (interface{}, error) { + if q.limit == RequestBacklogQueueNoLimit { + // if specified max duration, pass-through + return q.jrpcDownstreamHandler(ctx, req) + } + + if newPending := atomic.AddUint64(&q.pending, 1); newPending > q.limit { + // we've reached our queue limit - let the caller know we're too busy. + atomic.AddUint64(&q.pending, ^uint64(0)) + if atomic.CompareAndSwapUint64(&q.limitReached, 0, 1) { + // if the limit was reached, log a message. + if q.logger != nil { + q.logger.Infof("Backlog queue limiter reached the queue limit of %d executing concurrent rpc %s requests.", q.limit, req.Method()) + } + } + return nil, errors.Errorf("rpc queue for %s surpassed queue limit of %d requests", req.Method(), q.limit) + } else { + if q.gauge != nil { + q.gauge.Inc() + } + } + + defer func() { + atomic.AddUint64(&q.pending, ^uint64(0)) + if q.gauge != nil { + q.gauge.Dec() + } + atomic.StoreUint64(&q.limitReached, 0) + }() + + return q.jrpcDownstreamHandler(ctx, req) +} diff --git a/cmd/soroban-rpc/internal/network/backlogQ_test.go b/cmd/soroban-rpc/internal/network/backlogQ_test.go new file mode 100644 index 00000000..3fb05959 --- /dev/null +++ b/cmd/soroban-rpc/internal/network/backlogQ_test.go @@ -0,0 +1,237 @@ +package network + +import ( + "context" + "math/rand" + "net/http" + "sync" + "sync/atomic" + "testing" + + "github.com/creachadair/jrpc2" + "github.com/stretchr/testify/require" +) + +type TestingHandlerWrapper struct { + f func(http.ResponseWriter, *http.Request) +} + +func (t *TestingHandlerWrapper) ServeHTTP(res http.ResponseWriter, req *http.Request) { + t.f(res, req) +} + +type TestingJrpcHandlerWrapper struct { + f func(context.Context, *jrpc2.Request) (interface{}, error) +} + +func (t *TestingJrpcHandlerWrapper) Handle(ctx context.Context, req *jrpc2.Request) (interface{}, error) { + return t.f(ctx, req) +} + +// The goal of the TestBacklogQueueLimiter_HttpNonBlocking is to try +// and enquque load against the queue limiter, without hitting the +// limit. All request should pass through. +func TestBacklogQueueLimiter_HttpNonBlocking(t *testing.T) { + var sum uint64 + var wg sync.WaitGroup + requestsSizeLimit := uint64(1000) + adding := &TestingHandlerWrapper{f: func(res http.ResponseWriter, req *http.Request) { + atomic.AddUint64(&sum, 1) + }} + + logCounter := makeTestLogCounter() + testGauge := &TestingGauge{} + limiter := MakeHTTPBacklogQueueLimiter(adding, testGauge, requestsSizeLimit, logCounter.Entry()) + for i := 1; i < 50; i++ { + n := rand.Int63n(int64(requestsSizeLimit)) //nolint:gosec + require.Zero(t, int(testGauge.count)) + wg.Add(int(n)) + for k := n; k > 0; k-- { + go func() { + limiter.ServeHTTP(nil, nil) + wg.Done() + }() + } + wg.Wait() + require.Equal(t, uint64(n), sum) + require.Zero(t, int(testGauge.count)) + sum = 0 + } + require.Equal(t, [7]int{0, 0, 0, 0, 0, 0, 0}, logCounter.writtenLogEntries) +} + +// The goal of the TestBacklogQueueLimiter_HttpNonBlocking is to try +// and enquque load against the queue limiter, without hitting the +// limit. 
All request should pass through. +func TestBacklogQueueLimiter_JrpcNonBlocking(t *testing.T) { + var sum uint64 + var wg sync.WaitGroup + requestsSizeLimit := uint64(1000) + adding := &TestingJrpcHandlerWrapper{f: func(context.Context, *jrpc2.Request) (interface{}, error) { + atomic.AddUint64(&sum, 1) + return nil, nil + }} + logCounter := makeTestLogCounter() + testGauge := &TestingGauge{} + limiter := MakeJrpcBacklogQueueLimiter(adding.Handle, testGauge, requestsSizeLimit, logCounter.Entry()) + for i := 1; i < 50; i++ { + n := rand.Int63n(int64(requestsSizeLimit)) //nolint:gosec + require.Zero(t, int(testGauge.count)) + wg.Add(int(n)) + for k := n; k > 0; k-- { + go func() { + _, err := limiter.Handle(context.Background(), nil) + require.Nil(t, err) + wg.Done() + }() + } + wg.Wait() + require.Zero(t, int(testGauge.count)) + require.Equal(t, uint64(n), sum) + sum = 0 + } + require.Equal(t, [7]int{0, 0, 0, 0, 0, 0, 0}, logCounter.writtenLogEntries) +} + +// The goal of the TestBacklogQueueLimiter_HttpBlocking is to set +// up a queue that already reached it's limit and see that +// additional requests are being rejected. Then, unblock the queue +// and see that requests could go though. +func TestBacklogQueueLimiter_HttpBlocking(t *testing.T) { + for _, queueSize := range []uint64{7, 50, 80} { + blockedCh := make(chan interface{}) + var initialGroupBlocking sync.WaitGroup + initialGroupBlocking.Add(int(queueSize) / 2) + blockedHandlers := &TestingHandlerWrapper{f: func(res http.ResponseWriter, req *http.Request) { + initialGroupBlocking.Done() + <-blockedCh + }} + logCounter := makeTestLogCounter() + testGauge := &TestingGauge{} + limiter := MakeHTTPBacklogQueueLimiter(blockedHandlers, testGauge, queueSize, logCounter.Entry()) + for i := uint64(0); i < queueSize/2; i++ { + go func() { + limiter.ServeHTTP(nil, nil) + initialGroupBlocking.Done() + }() + } + + initialGroupBlocking.Wait() + require.Equal(t, int(queueSize)/2, int(testGauge.count)) + + var secondBlockingGroupWg sync.WaitGroup + secondBlockingGroupWg.Add(int(queueSize) - int(queueSize)/2) + secondBlockingGroupWgCh := make(chan interface{}) + secondBlockingGroupWgHandlers := &TestingHandlerWrapper{f: func(res http.ResponseWriter, req *http.Request) { + secondBlockingGroupWg.Done() + <-secondBlockingGroupWgCh + }} + + limiter.httpDownstreamHandler = secondBlockingGroupWgHandlers + for i := queueSize / 2; i < queueSize; i++ { + go func() { + limiter.ServeHTTP(nil, nil) + secondBlockingGroupWg.Done() + }() + } + + secondBlockingGroupWg.Wait() + require.Equal(t, [7]int{0, 0, 0, 0, 0, 0, 0}, logCounter.writtenLogEntries) + require.Equal(t, int(queueSize), int(testGauge.count)) + // now, try to place additional entry - which should be blocked. + var res TestingResponseWriter + limiter.ServeHTTP(&res, nil) + require.Equal(t, http.StatusServiceUnavailable, res.statusCode) + require.Equal(t, [7]int{0, 0, 0, 0, 1, 0, 0}, logCounter.writtenLogEntries) + require.Equal(t, int(queueSize), int(testGauge.count)) + + secondBlockingGroupWg.Add(int(queueSize) - int(queueSize)/2) + // unblock the second group. + close(secondBlockingGroupWgCh) + secondBlockingGroupWg.Wait() + require.Equal(t, int(queueSize)/2, int(testGauge.count)) + + // see that we have no blocking + res = TestingResponseWriter{} + require.Equal(t, 0, res.statusCode) + + // unblock the first group. 
+ initialGroupBlocking.Add(int(queueSize) / 2) + close(blockedCh) + initialGroupBlocking.Wait() + require.Equal(t, [7]int{0, 0, 0, 0, 1, 0, 0}, logCounter.writtenLogEntries) + require.Zero(t, int(testGauge.count)) + } +} + +// The goal of the TestBacklogQueueLimiter_JrpcBlocking is to set +// up a queue that already reached it's limit and see that +// additional requests are being rejected. Then, unblock the queue +// and see that requests could go though. +func TestBacklogQueueLimiter_JrpcBlocking(t *testing.T) { + for _, queueSize := range []uint64{7, 50, 80} { + blockedCh := make(chan interface{}) + var initialGroupBlocking sync.WaitGroup + initialGroupBlocking.Add(int(queueSize) / 2) + blockedHandlers := &TestingJrpcHandlerWrapper{f: func(context.Context, *jrpc2.Request) (interface{}, error) { + initialGroupBlocking.Done() + <-blockedCh + return nil, nil + }} + logCounter := makeTestLogCounter() + testGauge := &TestingGauge{} + limiter := MakeJrpcBacklogQueueLimiter(blockedHandlers.Handle, testGauge, queueSize, logCounter.Entry()) + for i := uint64(0); i < queueSize/2; i++ { + go func() { + _, err := limiter.Handle(context.Background(), &jrpc2.Request{}) + require.Nil(t, err) + initialGroupBlocking.Done() + }() + } + initialGroupBlocking.Wait() + require.Equal(t, int(queueSize)/2, int(testGauge.count)) + + var secondBlockingGroupWg sync.WaitGroup + secondBlockingGroupWg.Add(int(queueSize) - int(queueSize)/2) + secondBlockingGroupWgCh := make(chan interface{}) + secondBlockingGroupWgHandlers := &TestingJrpcHandlerWrapper{f: func(context.Context, *jrpc2.Request) (interface{}, error) { + secondBlockingGroupWg.Done() + <-secondBlockingGroupWgCh + return nil, nil + }} + + limiter.jrpcDownstreamHandler = secondBlockingGroupWgHandlers.Handle + for i := queueSize / 2; i < queueSize; i++ { + go func() { + _, err := limiter.Handle(context.Background(), &jrpc2.Request{}) + require.Nil(t, err) + secondBlockingGroupWg.Done() + }() + } + secondBlockingGroupWg.Wait() + require.Equal(t, [7]int{0, 0, 0, 0, 0, 0, 0}, logCounter.writtenLogEntries) + require.Equal(t, int(queueSize), int(testGauge.count)) + // now, try to place additional entry - which should be blocked. + var res TestingResponseWriter + _, err := limiter.Handle(context.Background(), &jrpc2.Request{}) + require.NotNil(t, err) + require.Equal(t, [7]int{0, 0, 0, 0, 1, 0, 0}, logCounter.writtenLogEntries) + + secondBlockingGroupWg.Add(int(queueSize) - int(queueSize)/2) + // unblock the second group. + close(secondBlockingGroupWgCh) + secondBlockingGroupWg.Wait() + require.Equal(t, int(queueSize)/2, int(testGauge.count)) + + // see that we have no blocking + res = TestingResponseWriter{} + require.Equal(t, 0, res.statusCode) + + // unblock the first group. 
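// Illustrative wiring sketch (not part of this diff): the JSON-RPC variant of the
// backlog limiter wraps a single jrpc2 handler, for example one entry of a handler.Map,
// before the map is bridged to HTTP. The "echo" method, the limit of 32, and the listen
// address are assumptions; nil gauge and logger are accepted because the limiter
// nil-checks both, and rejected calls fail with the "surpassed queue limit" error
// defined above rather than queueing up.

package main

import (
	"context"
	"net/http"

	"github.com/creachadair/jrpc2"
	"github.com/creachadair/jrpc2/handler"
	"github.com/creachadair/jrpc2/jhttp"

	"github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/network"
)

func main() {
	echo := handler.New(func(ctx context.Context, req *jrpc2.Request) (interface{}, error) {
		return "ok", nil
	})

	handlers := handler.Map{
		// At most 32 concurrently executing "echo" calls.
		"echo": network.MakeJrpcBacklogQueueLimiter(echo, nil, 32, nil).Handle,
	}

	_ = http.ListenAndServe("127.0.0.1:8080", jhttp.NewBridge(handlers, &jhttp.BridgeOptions{}))
}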
+ initialGroupBlocking.Add(int(queueSize) / 2) + close(blockedCh) + initialGroupBlocking.Wait() + require.Equal(t, [7]int{0, 0, 0, 0, 1, 0, 0}, logCounter.writtenLogEntries) + require.Zero(t, int(testGauge.count)) + } +} diff --git a/cmd/soroban-rpc/internal/network/requestdurationlimiter.go b/cmd/soroban-rpc/internal/network/requestdurationlimiter.go new file mode 100644 index 00000000..05204591 --- /dev/null +++ b/cmd/soroban-rpc/internal/network/requestdurationlimiter.go @@ -0,0 +1,304 @@ +package network + +import ( + "context" + "net/http" + "reflect" + "runtime" + "time" + + "github.com/creachadair/jrpc2" + "github.com/stellar/go/support/log" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/util" +) + +const maxUint = ^uint64(0) //18446744073709551615 +const maxInt = int64(maxUint >> 1) // 9223372036854775807 +const maxDuration = time.Duration(maxInt) + +const RequestDurationLimiterNoLimit = maxDuration + +// The increasingCounter is a subset of prometheus.Counter, and it allows us to mock the +// counter usage for testing purposes without requiring the implementation of the true +// prometheus.Counter. +type increasingCounter interface { + // Inc increments the counter by 1. Use Add to increment it by arbitrary + // non-negative values. + Inc() +} + +type requestDurationLimiter struct { + warningThreshold time.Duration + limitThreshold time.Duration + logger *log.Entry + warningCounter increasingCounter + limitCounter increasingCounter +} + +type httpRequestDurationLimiter struct { + httpDownstreamHandler http.Handler + requestDurationLimiter +} + +func MakeHTTPRequestDurationLimiter( + downstream http.Handler, + warningThreshold time.Duration, + limitThreshold time.Duration, + warningCounter increasingCounter, + limitCounter increasingCounter, + logger *log.Entry) *httpRequestDurationLimiter { + // make sure the warning threshold is less then the limit threshold; otherwise, just set it to the limit threshold. + if warningThreshold > limitThreshold { + warningThreshold = limitThreshold + } + return &httpRequestDurationLimiter{ + httpDownstreamHandler: downstream, + requestDurationLimiter: requestDurationLimiter{ + warningThreshold: warningThreshold, + limitThreshold: limitThreshold, + logger: logger, + warningCounter: warningCounter, + limitCounter: limitCounter, + }, + } +} + +type bufferedResponseWriter struct { + header http.Header + buffer []byte + statusCode int +} + +func makeBufferedResponseWriter(rw http.ResponseWriter) *bufferedResponseWriter { + header := rw.Header() + bw := &bufferedResponseWriter{ + header: make(http.Header, 0), + } + for k, v := range header { + bw.header[k] = v + } + return bw +} + +func (w *bufferedResponseWriter) Header() http.Header { + return w.header +} +func (w *bufferedResponseWriter) Write(buf []byte) (int, error) { + w.buffer = append(w.buffer, buf...) + return len(buf), nil +} +func (w *bufferedResponseWriter) WriteHeader(statusCode int) { + w.statusCode = statusCode +} + +func (w *bufferedResponseWriter) WriteOut(ctx context.Context, rw http.ResponseWriter) { + // update the headers map. + headers := rw.Header() + for k := range headers { + delete(headers, k) + } + for k, v := range w.header { + headers[k] = v + } + + if len(w.buffer) == 0 { + if w.statusCode != 0 { + rw.WriteHeader(w.statusCode) + } + return + } + if w.statusCode != 0 { + rw.WriteHeader(w.statusCode) + } + + if ctx.Err() == nil { + // the following return size/error won't help us much at this point. The request is already finalized. 
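// Illustrative wiring sketch (not part of this diff): bounding request duration with the
// HTTP limiter constructed above. The thresholds, mux, and listen address are
// assumptions, and as an internal package this would have to live under cmd/soroban-rpc.
// Passing nil for the counters and logger is fine because the limiter nil-checks them.
// A request that exceeds the hard limit has its context canceled and is answered with
// 504 Gateway Timeout; exceeding only the warning threshold just logs and counts.

package main

import (
	"net/http"
	"time"

	"github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/network"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/work", func(w http.ResponseWriter, r *http.Request) {
		select {
		case <-r.Context().Done(): // canceled once the hard limit fires
			return
		case <-time.After(2 * time.Second):
			_, _ = w.Write([]byte("done"))
		}
	})

	limited := network.MakeHTTPRequestDurationLimiter(
		mux,           // downstream handler
		1*time.Second, // warning threshold: log and count, but let the request finish
		5*time.Second, // limit threshold: cancel the request context and return 504
		nil, nil, nil, // warning counter, limit counter, logger (all optional here)
	)

	_ = http.ListenAndServe("127.0.0.1:8080", limited)
}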
+ rw.Write(w.buffer) //nolint:errcheck + } +} + +func (q *httpRequestDurationLimiter) ServeHTTP(res http.ResponseWriter, req *http.Request) { + if q.limitThreshold == RequestDurationLimiterNoLimit { + // if specified max duration, pass-through + q.httpDownstreamHandler.ServeHTTP(res, req) + return + } + var warningCh <-chan time.Time + if q.warningThreshold != time.Duration(0) && q.warningThreshold < q.limitThreshold { + warningCh = time.NewTimer(q.warningThreshold).C + } + var limitCh <-chan time.Time + if q.limitThreshold != time.Duration(0) { + limitCh = time.NewTimer(q.limitThreshold).C + } + requestCompleted := make(chan []string, 1) + requestCtx, requestCtxCancel := context.WithTimeout(req.Context(), q.limitThreshold) + defer requestCtxCancel() + timeLimitedRequest := req.WithContext(requestCtx) + responseBuffer := makeBufferedResponseWriter(res) + go func() { + defer func() { + if err := recover(); err != nil { + functionName := runtime.FuncForPC(reflect.ValueOf(q.httpDownstreamHandler.ServeHTTP).Pointer()).Name() + callStack := util.CallStack(err, functionName, "(*httpRequestDurationLimiter).ServeHTTP.func1()", 8) + requestCompleted <- callStack + } else { + close(requestCompleted) + } + }() + q.httpDownstreamHandler.ServeHTTP(responseBuffer, timeLimitedRequest) + }() + + warn := false + for { + select { + case <-warningCh: + // warn + warn = true + case <-limitCh: + // limit + requestCtxCancel() + if q.limitCounter != nil { + q.limitCounter.Inc() + } + if q.logger != nil { + q.logger.Infof("Request processing for %s exceed limiting threshold of %v", req.URL.Path, q.limitThreshold) + } + if req.Context().Err() == nil { + res.WriteHeader(http.StatusGatewayTimeout) + } + return + case errStrings := <-requestCompleted: + if warn { + if q.warningCounter != nil { + q.warningCounter.Inc() + } + if q.logger != nil { + q.logger.Infof("Request processing for %s exceed warning threshold of %v", req.URL.Path, q.warningThreshold) + } + } + if len(errStrings) == 0 { + responseBuffer.WriteOut(req.Context(), res) + } else { + res.WriteHeader(http.StatusInternalServerError) + for _, errStr := range errStrings { + if q.logger != nil { + q.logger.Warn(errStr) + } + } + } + return + } + } +} + +type rpcRequestDurationLimiter struct { + jrpcDownstreamHandler jrpc2.Handler + requestDurationLimiter +} + +func MakeJrpcRequestDurationLimiter( + downstream jrpc2.Handler, + warningThreshold time.Duration, + limitThreshold time.Duration, + warningCounter increasingCounter, + limitCounter increasingCounter, + logger *log.Entry) *rpcRequestDurationLimiter { + // make sure the warning threshold is less then the limit threshold; otherwise, just set it to the limit threshold. 
+ if warningThreshold > limitThreshold { + warningThreshold = limitThreshold + } + + return &rpcRequestDurationLimiter{ + jrpcDownstreamHandler: downstream, + requestDurationLimiter: requestDurationLimiter{ + warningThreshold: warningThreshold, + limitThreshold: limitThreshold, + logger: logger, + warningCounter: warningCounter, + limitCounter: limitCounter, + }, + } +} + +func (q *rpcRequestDurationLimiter) Handle(ctx context.Context, req *jrpc2.Request) (interface{}, error) { + if q.limitThreshold == RequestDurationLimiterNoLimit { + // if specified max duration, pass-through + return q.jrpcDownstreamHandler(ctx, req) + } + var warningCh <-chan time.Time + if q.warningThreshold != time.Duration(0) && q.warningThreshold < q.limitThreshold { + warningCh = time.NewTimer(q.warningThreshold).C + } + var limitCh <-chan time.Time + if q.limitThreshold != time.Duration(0) { + limitCh = time.NewTimer(q.limitThreshold).C + } + type requestResultOutput struct { + data interface{} + err error + } + requestCompleted := make(chan requestResultOutput, 1) + requestCtx, requestCtxCancel := context.WithTimeout(ctx, q.limitThreshold) + defer requestCtxCancel() + + go func() { + defer func() { + if err := recover(); err != nil { + q.logger.Errorf("Request for method %s resulted in an error : %v", req.Method(), err) + } + close(requestCompleted) + }() + var res requestResultOutput + res.data, res.err = q.jrpcDownstreamHandler(requestCtx, req) + requestCompleted <- res + }() + + warn := false + for { + select { + case <-warningCh: + // warn + warn = true + case <-limitCh: + // limit + requestCtxCancel() + if q.limitCounter != nil { + q.limitCounter.Inc() + } + if q.logger != nil { + q.logger.Infof("Request processing for %s exceed limiting threshold of %v", req.Method(), q.limitThreshold) + } + if ctxErr := ctx.Err(); ctxErr == nil { + return nil, ErrRequestExceededProcessingLimitThreshold + } else { + return nil, ctxErr + } + case requestRes, ok := <-requestCompleted: + if warn { + if q.warningCounter != nil { + q.warningCounter.Inc() + } + if q.logger != nil { + q.logger.Infof("Request processing for %s exceed warning threshold of %v", req.Method(), q.warningThreshold) + } + } + if ok { + return requestRes.data, requestRes.err + } else { + // request panicked ? 
+ return nil, ErrFailToProcessDueToInternalIssue + } + } + } +} + +var ErrRequestExceededProcessingLimitThreshold = jrpc2.Error{ + Code: -32001, + Message: "request exceeded processing limit threshold", +} + +var ErrFailToProcessDueToInternalIssue = jrpc2.Error{ + Code: -32003, // internal error + Message: "request failed to process due to internal issue", +} diff --git a/cmd/soroban-rpc/internal/network/requestdurationlimiter_test.go b/cmd/soroban-rpc/internal/network/requestdurationlimiter_test.go new file mode 100644 index 00000000..5be64cea --- /dev/null +++ b/cmd/soroban-rpc/internal/network/requestdurationlimiter_test.go @@ -0,0 +1,339 @@ +package network + +import ( + "context" + "io" + "net" + "net/http" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/handler" + "github.com/creachadair/jrpc2/jhttp" +) + +type TestServerHandlerWrapper struct { + f func(http.ResponseWriter, *http.Request) +} + +func (h *TestServerHandlerWrapper) ServeHTTP(res http.ResponseWriter, req *http.Request) { + h.f(res, req) +} + +func createTestServer() (serverAddr string, redirector *TestServerHandlerWrapper, shutdown context.CancelFunc) { + ipAddr, _ := net.ResolveTCPAddr("tcp", "127.0.0.1:0") + listener, _ := net.ListenTCP("tcp", ipAddr) + handlerRedirector := &TestServerHandlerWrapper{} + server := http.Server{ + Handler: handlerRedirector, + ReadHeaderTimeout: 10 * time.Second, + } + + serverDown := make(chan error) + go func() { + serverDown <- server.Serve(listener) + }() + + return listener.Addr().String(), handlerRedirector, func() { + server.Shutdown(context.Background()) //nolint:errcheck + <-serverDown + } +} + +func TestHTTPRequestDurationLimiter_Limiting(t *testing.T) { + addr, redirector, shutdown := createTestServer() + longExecutingHandler := &TestServerHandlerWrapper{ + f: func(res http.ResponseWriter, req *http.Request) { + select { + case <-req.Context().Done(): + return + case <-time.After(time.Second * 10): + } + n, err := res.Write([]byte{1, 2, 3}) + require.Equal(t, 3, n) + require.Nil(t, err) + }, + } + warningCounter := TestingCounter{} + limitCounter := TestingCounter{} + logCounter := makeTestLogCounter() + redirector.f = MakeHTTPRequestDurationLimiter( + longExecutingHandler, + time.Second/20, + time.Second/10, + &warningCounter, + &limitCounter, + logCounter.Entry()).ServeHTTP + + client := http.Client{} + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://"+addr+"/", nil) + require.NoError(t, err) + resp, err := client.Do(req) + require.NoError(t, err) + bytes, err := io.ReadAll(resp.Body) + require.NoError(t, resp.Body.Close()) + require.NoError(t, err) + require.Equal(t, []byte{}, bytes) + require.Equal(t, resp.StatusCode, http.StatusGatewayTimeout) + require.Zero(t, warningCounter.count) + require.Equal(t, int64(1), limitCounter.count) + require.Equal(t, [7]int{0, 0, 0, 0, 1, 0, 0}, logCounter.writtenLogEntries) + shutdown() +} + +func TestHTTPRequestDurationLimiter_NoLimiting(t *testing.T) { + addr, redirector, shutdown := createTestServer() + longExecutingHandler := &TestServerHandlerWrapper{ + f: func(res http.ResponseWriter, req *http.Request) { + select { + case <-req.Context().Done(): + return + case <-time.After(time.Second / 10): + } + n, err := res.Write([]byte{1, 2, 3}) + require.Equal(t, 3, n) + require.Nil(t, err) + }, + } + warningCounter := TestingCounter{} + limitCounter := TestingCounter{} + logCounter := makeTestLogCounter() + redirector.f = 
MakeHTTPRequestDurationLimiter( + longExecutingHandler, + time.Second*5, + time.Second*10, + &warningCounter, + &limitCounter, + logCounter.Entry()).ServeHTTP + + client := http.Client{} + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://"+addr+"/", nil) + require.NoError(t, err) + resp, err := client.Do(req) + require.NoError(t, err) + bytes, err := io.ReadAll(resp.Body) + require.NoError(t, resp.Body.Close()) + require.NoError(t, err) + require.Equal(t, []byte{1, 2, 3}, bytes) + require.Equal(t, resp.StatusCode, http.StatusOK) + require.Zero(t, warningCounter.count) + require.Zero(t, limitCounter.count) + require.Equal(t, [7]int{0, 0, 0, 0, 0, 0, 0}, logCounter.writtenLogEntries) + shutdown() +} + +func TestHTTPRequestDurationLimiter_NoLimiting_Warn(t *testing.T) { + addr, redirector, shutdown := createTestServer() + longExecutingHandler := &TestServerHandlerWrapper{ + f: func(res http.ResponseWriter, req *http.Request) { + select { + case <-req.Context().Done(): + return + case <-time.After(time.Second / 5): + } + n, err := res.Write([]byte{1, 2, 3}) + require.Equal(t, 3, n) + require.Nil(t, err) + }, + } + warningCounter := TestingCounter{} + limitCounter := TestingCounter{} + logCounter := makeTestLogCounter() + redirector.f = MakeHTTPRequestDurationLimiter( + longExecutingHandler, + time.Second/10, + time.Second*10, + &warningCounter, + &limitCounter, + logCounter.Entry()).ServeHTTP + + client := http.Client{} + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://"+addr+"/", nil) + require.NoError(t, err) + resp, err := client.Do(req) + require.NoError(t, err) + bytes, err := io.ReadAll(resp.Body) + require.NoError(t, resp.Body.Close()) + require.NoError(t, err) + require.Equal(t, []byte{1, 2, 3}, bytes) + require.Equal(t, resp.StatusCode, http.StatusOK) + require.Equal(t, int64(1), warningCounter.count) + require.Zero(t, limitCounter.count) + require.Equal(t, [7]int{0, 0, 0, 0, 1, 0, 0}, logCounter.writtenLogEntries) + shutdown() +} + +type JRPCHandlerFunc func(ctx context.Context, r *jrpc2.Request) (interface{}, error) + +func bindRPCHoist(redirector *TestServerHandlerWrapper) *JRPCHandlerFunc { + var hoistFunction JRPCHandlerFunc + + bridgeMap := handler.Map{ + "method": handler.New(func(ctx context.Context, r *jrpc2.Request) (interface{}, error) { + return hoistFunction(ctx, r) + }), + } + + redirector.f = jhttp.NewBridge(bridgeMap, &jhttp.BridgeOptions{}).ServeHTTP + return &hoistFunction +} + +func TestJRPCRequestDurationLimiter_Limiting(t *testing.T) { + addr, redirector, shutdown := createTestServer() + hoistFunction := bindRPCHoist(redirector) + + longExecutingHandler := handler.New(func(ctx context.Context, r *jrpc2.Request) (interface{}, error) { + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-time.After(time.Second * 10): + } + return "", nil + }) + + warningCounter := TestingCounter{} + limitCounter := TestingCounter{} + logCounter := makeTestLogCounter() + *hoistFunction = MakeJrpcRequestDurationLimiter( + longExecutingHandler, + time.Second/20, + time.Second/10, + &warningCounter, + &limitCounter, + logCounter.Entry()).Handle + + ch := jhttp.NewChannel("http://"+addr+"/", nil) + client := jrpc2.NewClient(ch, nil) + + var res interface{} + req := struct { + i int + }{1} + err := client.CallResult(context.Background(), "method", req, &res) + require.NotNil(t, err) + jrpcError, ok := err.(*jrpc2.Error) + require.True(t, ok) + require.Equal(t, 
ErrRequestExceededProcessingLimitThreshold.Code, jrpcError.Code) + require.Equal(t, nil, res) + require.Zero(t, warningCounter.count) + require.Equal(t, int64(1), limitCounter.count) + require.Equal(t, [7]int{0, 0, 0, 0, 1, 0, 0}, logCounter.writtenLogEntries) + shutdown() +} + +func TestJRPCRequestDurationLimiter_NoLimiting(t *testing.T) { + addr, redirector, shutdown := createTestServer() + hoistFunction := bindRPCHoist(redirector) + + returnString := "ok" + longExecutingHandler := handler.New(func(ctx context.Context, r *jrpc2.Request) (interface{}, error) { + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-time.After(time.Second / 10): + } + return returnString, nil + }) + + warningCounter := TestingCounter{} + limitCounter := TestingCounter{} + logCounter := makeTestLogCounter() + *hoistFunction = MakeJrpcRequestDurationLimiter( + longExecutingHandler, + time.Second*5, + time.Second*10, + &warningCounter, + &limitCounter, + logCounter.Entry()).Handle + + ch := jhttp.NewChannel("http://"+addr+"/", nil) + client := jrpc2.NewClient(ch, nil) + + var res interface{} + req := struct { + i int + }{1} + err := client.CallResult(context.Background(), "method", req, &res) + require.Nil(t, err) + require.Equal(t, returnString, res) + require.Zero(t, warningCounter.count) + require.Zero(t, limitCounter.count) + require.Equal(t, [7]int{0, 0, 0, 0, 0, 0, 0}, logCounter.writtenLogEntries) + shutdown() +} + +func TestJRPCRequestDurationLimiter_NoLimiting_Warn(t *testing.T) { + addr, redirector, shutdown := createTestServer() + hoistFunction := bindRPCHoist(redirector) + + returnString := "ok" + longExecutingHandler := handler.New(func(ctx context.Context, r *jrpc2.Request) (interface{}, error) { + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-time.After(time.Second / 5): + } + return returnString, nil + }) + + warningCounter := TestingCounter{} + limitCounter := TestingCounter{} + logCounter := makeTestLogCounter() + *hoistFunction = MakeJrpcRequestDurationLimiter( + longExecutingHandler, + time.Second/10, + time.Second*10, + &warningCounter, + &limitCounter, + logCounter.Entry()).Handle + + ch := jhttp.NewChannel("http://"+addr+"/", nil) + client := jrpc2.NewClient(ch, nil) + + var res interface{} + req := struct { + i int + }{1} + err := client.CallResult(context.Background(), "method", req, &res) + require.Nil(t, err) + require.Equal(t, returnString, res) + require.Equal(t, int64(1), warningCounter.count) + require.Zero(t, limitCounter.count) + require.Equal(t, [7]int{0, 0, 0, 0, 1, 0, 0}, logCounter.writtenLogEntries) + shutdown() +} + +func TestHTTPRequestDurationLimiter_Panicing(t *testing.T) { + addr, redirector, shutdown := createTestServer() + longExecutingHandler := &TestServerHandlerWrapper{ + f: func(res http.ResponseWriter, req *http.Request) { + var panicWrite *int + *panicWrite = 1 + }, + } + + logCounter := makeTestLogCounter() + redirector.f = MakeHTTPRequestDurationLimiter( + longExecutingHandler, + time.Second*10, + time.Second*10, + nil, + nil, + logCounter.Entry()).ServeHTTP + + client := http.Client{} + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://"+addr+"/", nil) + require.NoError(t, err) + resp, err := client.Do(req) + require.NoError(t, err) + bytes, err := io.ReadAll(resp.Body) + require.NoError(t, err) + require.NoError(t, resp.Body.Close()) + require.Equal(t, http.StatusInternalServerError, resp.StatusCode) + require.Equal(t, []byte{}, bytes) + require.Equal(t, [7]int{0, 0, 0, 7, 0, 0, 0}, 
logCounter.writtenLogEntries) + shutdown() +} diff --git a/cmd/soroban-rpc/internal/network/utils_test.go b/cmd/soroban-rpc/internal/network/utils_test.go new file mode 100644 index 00000000..8109a63d --- /dev/null +++ b/cmd/soroban-rpc/internal/network/utils_test.go @@ -0,0 +1,68 @@ +package network + +import ( + "net/http" + "sync/atomic" + + "github.com/sirupsen/logrus" + "github.com/stellar/go/support/log" +) + +type TestingCounter struct { + count int64 +} + +func (tc *TestingCounter) Inc() { + atomic.AddInt64(&tc.count, 1) +} + +type TestingGauge struct { + count int64 +} + +func (tg *TestingGauge) Inc() { + atomic.AddInt64(&tg.count, 1) +} + +func (tg *TestingGauge) Dec() { + atomic.AddInt64(&tg.count, -1) +} + +type TestLogsCounter struct { + entry *log.Entry + writtenLogEntries [logrus.TraceLevel + 1]int +} + +func makeTestLogCounter() *TestLogsCounter { + out := &TestLogsCounter{ + entry: log.New(), + } + out.entry.AddHook(out) + out.entry.SetLevel(logrus.DebugLevel) + return out +} +func (te *TestLogsCounter) Entry() *log.Entry { + return te.entry +} +func (te *TestLogsCounter) Levels() []logrus.Level { + return []logrus.Level{logrus.PanicLevel, logrus.FatalLevel, logrus.ErrorLevel, logrus.WarnLevel, logrus.InfoLevel, logrus.DebugLevel, logrus.TraceLevel} +} +func (te *TestLogsCounter) Fire(e *logrus.Entry) error { + te.writtenLogEntries[e.Level]++ + return nil +} + +type TestingResponseWriter struct { + statusCode int +} + +func (t *TestingResponseWriter) Header() http.Header { + return http.Header{} +} +func (t *TestingResponseWriter) Write([]byte) (int, error) { + return 0, nil +} + +func (t *TestingResponseWriter) WriteHeader(statusCode int) { + t.statusCode = statusCode +} diff --git a/cmd/soroban-rpc/internal/preflight/pool.go b/cmd/soroban-rpc/internal/preflight/pool.go new file mode 100644 index 00000000..1d182411 --- /dev/null +++ b/cmd/soroban-rpc/internal/preflight/pool.go @@ -0,0 +1,181 @@ +package preflight + +import ( + "context" + "errors" + "sync" + "sync/atomic" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" +) + +type workerResult struct { + preflight Preflight + err error +} + +type workerRequest struct { + ctx context.Context + params PreflightParameters + resultChan chan<- workerResult +} + +type PreflightWorkerPool struct { + ledgerEntryReader db.LedgerEntryReader + networkPassphrase string + enableDebug bool + logger *log.Entry + isClosed atomic.Bool + requestChan chan workerRequest + concurrentRequestsMetric prometheus.Gauge + errorFullCounter prometheus.Counter + durationMetric *prometheus.SummaryVec + ledgerEntriesFetchedMetric prometheus.Summary + wg sync.WaitGroup +} + +func NewPreflightWorkerPool(daemon interfaces.Daemon, workerCount uint, jobQueueCapacity uint, enableDebug bool, ledgerEntryReader db.LedgerEntryReader, networkPassphrase string, logger *log.Entry) *PreflightWorkerPool { + preflightWP := PreflightWorkerPool{ + ledgerEntryReader: ledgerEntryReader, + networkPassphrase: networkPassphrase, + enableDebug: enableDebug, + logger: logger, + requestChan: make(chan workerRequest, jobQueueCapacity), + } + requestQueueMetric := prometheus.NewGaugeFunc(prometheus.GaugeOpts{ + Namespace: daemon.MetricsNamespace(), + Subsystem: "preflight_pool", + Name: "queue_length", + Help: "number of preflight requests in the queue", + 
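+		// the callback below reports the current queue depth by sampling the length of the buffered request channel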
}, func() float64 { + return float64(len(preflightWP.requestChan)) + }) + preflightWP.concurrentRequestsMetric = prometheus.NewGauge(prometheus.GaugeOpts{ + Namespace: daemon.MetricsNamespace(), + Subsystem: "preflight_pool", + Name: "concurrent_requests", + Help: "number of preflight requests currently running", + }) + preflightWP.errorFullCounter = prometheus.NewCounter(prometheus.CounterOpts{ + Namespace: daemon.MetricsNamespace(), + Subsystem: "preflight_pool", + Name: "queue_full_errors", + Help: "number of preflight full queue errors", + }) + preflightWP.durationMetric = prometheus.NewSummaryVec(prometheus.SummaryOpts{ + Namespace: daemon.MetricsNamespace(), + Subsystem: "preflight_pool", + Name: "request_ledger_get_duration_seconds", + Help: "preflight request duration broken down by status", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }, []string{"status", "type"}) + preflightWP.ledgerEntriesFetchedMetric = prometheus.NewSummary(prometheus.SummaryOpts{ + Namespace: daemon.MetricsNamespace(), + Subsystem: "preflight_pool", + Name: "request_ledger_entries_fetched", + Help: "ledger entries fetched by simulate transaction calls", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }) + daemon.MetricsRegistry().MustRegister( + requestQueueMetric, + preflightWP.concurrentRequestsMetric, + preflightWP.errorFullCounter, + preflightWP.durationMetric, + preflightWP.ledgerEntriesFetchedMetric, + ) + for i := uint(0); i < workerCount; i++ { + preflightWP.wg.Add(1) + go preflightWP.work() + } + return &preflightWP +} + +func (pwp *PreflightWorkerPool) work() { + defer pwp.wg.Done() + for request := range pwp.requestChan { + pwp.concurrentRequestsMetric.Inc() + startTime := time.Now() + preflight, err := GetPreflight(request.ctx, request.params) + status := "ok" + if err != nil { + status = "error" + } + pwp.durationMetric.With( + prometheus.Labels{"type": "all", "status": status}, + ).Observe(time.Since(startTime).Seconds()) + pwp.concurrentRequestsMetric.Dec() + request.resultChan <- workerResult{preflight, err} + } +} + +func (pwp *PreflightWorkerPool) Close() { + if !pwp.isClosed.CompareAndSwap(false, true) { + // it was already closed + return + } + close(pwp.requestChan) + pwp.wg.Wait() +} + +var PreflightQueueFullErr = errors.New("preflight queue full") + +type metricsLedgerEntryWrapper struct { + db.LedgerEntryReadTx + totalDurationMs uint64 + ledgerEntriesFetched uint32 +} + +func (m *metricsLedgerEntryWrapper) GetLedgerEntries(keys ...xdr.LedgerKey) ([]db.LedgerKeyAndEntry, error) { + startTime := time.Now() + entries, err := m.LedgerEntryReadTx.GetLedgerEntries(keys...) 
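+	// accumulate the time spent in the DB and the number of keys looked up, so the pool can report per-request DB metrics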
+ atomic.AddUint64(&m.totalDurationMs, uint64(time.Since(startTime).Milliseconds())) + atomic.AddUint32(&m.ledgerEntriesFetched, uint32(len(keys))) + return entries, err +} + +func (pwp *PreflightWorkerPool) GetPreflight(ctx context.Context, params PreflightGetterParameters) (Preflight, error) { + if pwp.isClosed.Load() { + return Preflight{}, errors.New("preflight worker pool is closed") + } + wrappedTx := metricsLedgerEntryWrapper{ + LedgerEntryReadTx: params.LedgerEntryReadTx, + } + preflightParams := PreflightParameters{ + Logger: pwp.logger, + SourceAccount: params.SourceAccount, + OpBody: params.OperationBody, + NetworkPassphrase: pwp.networkPassphrase, + LedgerEntryReadTx: &wrappedTx, + BucketListSize: params.BucketListSize, + Footprint: params.Footprint, + ResourceConfig: params.ResourceConfig, + EnableDebug: pwp.enableDebug, + } + resultC := make(chan workerResult) + select { + case pwp.requestChan <- workerRequest{ctx, preflightParams, resultC}: + result := <-resultC + if wrappedTx.ledgerEntriesFetched > 0 { + status := "ok" + if result.err != nil { + status = "error" + } + pwp.durationMetric.With( + prometheus.Labels{"type": "db", "status": status}, + ).Observe(float64(wrappedTx.totalDurationMs) / 1000.0) + } + pwp.ledgerEntriesFetchedMetric.Observe(float64(wrappedTx.ledgerEntriesFetched)) + return result.preflight, result.err + case <-ctx.Done(): + return Preflight{}, ctx.Err() + default: + pwp.errorFullCounter.Inc() + return Preflight{}, PreflightQueueFullErr + } +} diff --git a/cmd/soroban-rpc/internal/preflight/preflight.go b/cmd/soroban-rpc/internal/preflight/preflight.go new file mode 100644 index 00000000..e342ab43 --- /dev/null +++ b/cmd/soroban-rpc/internal/preflight/preflight.go @@ -0,0 +1,277 @@ +package preflight + +import ( + "context" + "errors" + "fmt" + "runtime/cgo" + "time" + "unsafe" + + "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" +) + +/* +#include "../../lib/preflight.h" +#include +// This assumes that the Rust compiler should be using a -gnu target (i.e. MinGW compiler) in Windows +// (I (fons) am not even sure if CGo supports MSVC, see https://github.com/golang/go/issues/20982) +#cgo windows,amd64 LDFLAGS: -L${SRCDIR}/../../../../target/x86_64-pc-windows-gnu/release-with-panic-unwind/ -lpreflight -lntdll -static -lws2_32 -lbcrypt -luserenv +// You cannot compile with -static in macOS (and it's not worth it in Linux, at least with glibc) +#cgo darwin,amd64 LDFLAGS: -L${SRCDIR}/../../../../target/x86_64-apple-darwin/release-with-panic-unwind/ -lpreflight -ldl -lm +#cgo darwin,arm64 LDFLAGS: -L${SRCDIR}/../../../../target/aarch64-apple-darwin/release-with-panic-unwind/ -lpreflight -ldl -lm +// In Linux, at least for now, we will be dynamically linking glibc. See https://github.com/2opremio/soroban-go-rust-preflight-poc/issues/3 for details +// I (fons) did try linking statically against musl but it caused problems catching (unwinding) Rust panics. 
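+// The Linux targets below therefore link dynamically against glibc (plus libdl and libm).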
+#cgo linux,amd64 LDFLAGS: -L${SRCDIR}/../../../../target/x86_64-unknown-linux-gnu/release-with-panic-unwind/ -lpreflight -ldl -lm +#cgo linux,arm64 LDFLAGS: -L${SRCDIR}/../../../../target/aarch64-unknown-linux-gnu/release-with-panic-unwind/ -lpreflight -ldl -lm +*/ +import "C" + +type snapshotSourceHandle struct { + readTx db.LedgerEntryReadTx + logger *log.Entry +} + +const ( + defaultInstructionLeeway uint64 = 3000000 +) + +// SnapshotSourceGet takes a LedgerKey XDR in base64 string and returns its matching LedgerEntry XDR in base64 string +// It's used by the Rust preflight code to obtain ledger entries. +// +//export SnapshotSourceGet +func SnapshotSourceGet(handle C.uintptr_t, cLedgerKey C.xdr_t) C.xdr_t { + h := cgo.Handle(handle).Value().(snapshotSourceHandle) + ledgerKeyXDR := GoXDR(cLedgerKey) + var ledgerKey xdr.LedgerKey + if err := xdr.SafeUnmarshal(ledgerKeyXDR, &ledgerKey); err != nil { + panic(err) + } + // TODO : the live-until sequence here is being ignored for now; it should be passed downstream. + present, entry, _, err := db.GetLedgerEntry(h.readTx, ledgerKey) + if err != nil { + h.logger.WithError(err).Error("SnapshotSourceGet(): GetLedgerEntry() failed") + return C.xdr_t{} + } + if !present { + return C.xdr_t{} + } + out, err := entry.MarshalBinary() + if err != nil { + panic(err) + } + + return C.xdr_t{ + xdr: (*C.uchar)(C.CBytes(out)), + len: C.size_t(len(out)), + } +} + +//export FreeGoXDR +func FreeGoXDR(xdr C.xdr_t) { + C.free(unsafe.Pointer(xdr.xdr)) +} + +type ResourceConfig struct { + InstructionLeeway uint64 `json:"instructionLeeway"` +} + +func DefaultResourceConfig() ResourceConfig { + return ResourceConfig{ + InstructionLeeway: defaultInstructionLeeway, + } +} + +type PreflightGetterParameters struct { + LedgerEntryReadTx db.LedgerEntryReadTx + BucketListSize uint64 + SourceAccount xdr.AccountId + OperationBody xdr.OperationBody + Footprint xdr.LedgerFootprint + ResourceConfig ResourceConfig +} + +type PreflightParameters struct { + Logger *log.Entry + SourceAccount xdr.AccountId + OpBody xdr.OperationBody + Footprint xdr.LedgerFootprint + NetworkPassphrase string + LedgerEntryReadTx db.LedgerEntryReadTx + BucketListSize uint64 + ResourceConfig ResourceConfig + EnableDebug bool +} + +type Preflight struct { + Error string + Events [][]byte // DiagnosticEvents XDR + TransactionData []byte // SorobanTransactionData XDR + MinFee int64 + Result []byte // XDR SCVal in base64 + Auth [][]byte // SorobanAuthorizationEntries XDR + CPUInstructions uint64 + MemoryBytes uint64 + PreRestoreTransactionData []byte // SorobanTransactionData XDR + PreRestoreMinFee int64 +} + +func CXDR(xdr []byte) C.xdr_t { + return C.xdr_t{ + xdr: (*C.uchar)(C.CBytes(xdr)), + len: C.size_t(len(xdr)), + } +} + +func GoXDR(xdr C.xdr_t) []byte { + return C.GoBytes(unsafe.Pointer(xdr.xdr), C.int(xdr.len)) +} + +func GoXDRVector(xdrVector C.xdr_vector_t) [][]byte { + result := make([][]byte, xdrVector.len) + inputSlice := unsafe.Slice(xdrVector.array, xdrVector.len) + for i, v := range inputSlice { + result[i] = GoXDR(v) + } + return result +} + +func GetPreflight(ctx context.Context, params PreflightParameters) (Preflight, error) { + switch params.OpBody.Type { + case xdr.OperationTypeInvokeHostFunction: + return getInvokeHostFunctionPreflight(params) + case xdr.OperationTypeExtendFootprintTtl, xdr.OperationTypeRestoreFootprint: + return getFootprintTtlPreflight(params) + default: + return Preflight{}, fmt.Errorf("unsupported operation type: %s", params.OpBody.Type.String()) + } +} + +func 
getFootprintTtlPreflight(params PreflightParameters) (Preflight, error) { + opBodyXDR, err := params.OpBody.MarshalBinary() + if err != nil { + return Preflight{}, err + } + opBodyCXDR := CXDR(opBodyXDR) + footprintXDR, err := params.Footprint.MarshalBinary() + if err != nil { + return Preflight{}, err + } + footprintCXDR := CXDR(footprintXDR) + handle := cgo.NewHandle(snapshotSourceHandle{params.LedgerEntryReadTx, params.Logger}) + defer handle.Delete() + + simulationLedgerSeq, err := getSimulationLedgerSeq(params.LedgerEntryReadTx) + if err != nil { + return Preflight{}, err + } + + res := C.preflight_footprint_ttl_op( + C.uintptr_t(handle), + C.uint64_t(params.BucketListSize), + opBodyCXDR, + footprintCXDR, + C.uint32_t(simulationLedgerSeq), + ) + + FreeGoXDR(opBodyCXDR) + FreeGoXDR(footprintCXDR) + + return GoPreflight(res), nil +} + +func getSimulationLedgerSeq(readTx db.LedgerEntryReadTx) (uint32, error) { + latestLedger, err := readTx.GetLatestLedgerSequence() + if err != nil { + return 0, err + } + // It's of utmost importance to simulate the transactions like we were on the next ledger. + // Otherwise, users would need to wait for an extra ledger to close in order to observe the effects of the latest ledger + // transaction submission. + sequenceNumber := latestLedger + 1 + return sequenceNumber, nil +} + +func getInvokeHostFunctionPreflight(params PreflightParameters) (Preflight, error) { + invokeHostFunctionXDR, err := params.OpBody.MustInvokeHostFunctionOp().MarshalBinary() + if err != nil { + return Preflight{}, err + } + invokeHostFunctionCXDR := CXDR(invokeHostFunctionXDR) + sourceAccountXDR, err := params.SourceAccount.MarshalBinary() + if err != nil { + return Preflight{}, err + } + sourceAccountCXDR := CXDR(sourceAccountXDR) + + hasConfig, stateArchivalConfig, _, err := db.GetLedgerEntry(params.LedgerEntryReadTx, xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeConfigSetting, + ConfigSetting: &xdr.LedgerKeyConfigSetting{ + ConfigSettingId: xdr.ConfigSettingIdConfigSettingStateArchival, + }, + }) + if err != nil { + return Preflight{}, err + } + if !hasConfig { + return Preflight{}, errors.New("state archival config setting missing in ledger storage") + } + + simulationLedgerSeq, err := getSimulationLedgerSeq(params.LedgerEntryReadTx) + if err != nil { + return Preflight{}, err + } + + stateArchival := stateArchivalConfig.Data.MustConfigSetting().MustStateArchivalSettings() + li := C.ledger_info_t{ + network_passphrase: C.CString(params.NetworkPassphrase), + sequence_number: C.uint32_t(simulationLedgerSeq), + protocol_version: 20, + timestamp: C.uint64_t(time.Now().Unix()), + // Current base reserve is 0.5XLM (in stroops) + base_reserve: 5_000_000, + min_temp_entry_ttl: C.uint(stateArchival.MinTemporaryTtl), + min_persistent_entry_ttl: C.uint(stateArchival.MinPersistentTtl), + max_entry_ttl: C.uint(stateArchival.MaxEntryTtl), + } + + handle := cgo.NewHandle(snapshotSourceHandle{params.LedgerEntryReadTx, params.Logger}) + defer handle.Delete() + resourceConfig := C.resource_config_t{ + instruction_leeway: C.uint64_t(params.ResourceConfig.InstructionLeeway), + } + res := C.preflight_invoke_hf_op( + C.uintptr_t(handle), + C.uint64_t(params.BucketListSize), + invokeHostFunctionCXDR, + sourceAccountCXDR, + li, + resourceConfig, + C.bool(params.EnableDebug), + ) + FreeGoXDR(invokeHostFunctionCXDR) + FreeGoXDR(sourceAccountCXDR) + + return GoPreflight(res), nil +} + +func GoPreflight(result *C.preflight_result_t) Preflight { + defer C.free_preflight_result(result) + + preflight := 
Preflight{ + Error: C.GoString(result.error), + Events: GoXDRVector(result.events), + TransactionData: GoXDR(result.transaction_data), + MinFee: int64(result.min_fee), + Result: GoXDR(result.result), + Auth: GoXDRVector(result.auth), + CPUInstructions: uint64(result.cpu_instructions), + MemoryBytes: uint64(result.memory_bytes), + PreRestoreTransactionData: GoXDR(result.pre_restore_transaction_data), + PreRestoreMinFee: int64(result.pre_restore_min_fee), + } + return preflight +} diff --git a/cmd/soroban-rpc/internal/preflight/preflight_test.go b/cmd/soroban-rpc/internal/preflight/preflight_test.go new file mode 100644 index 00000000..57a2e82b --- /dev/null +++ b/cmd/soroban-rpc/internal/preflight/preflight_test.go @@ -0,0 +1,441 @@ +package preflight + +import ( + "context" + "crypto/sha256" + "os" + "path" + "runtime" + "testing" + + "github.com/stellar/go/support/log" + "github.com/stellar/go/xdr" + "github.com/stretchr/testify/require" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" +) + +var mockContractID = xdr.Hash{0xa, 0xb, 0xc} +var mockContractHash = xdr.Hash{0xd, 0xe, 0xf} + +var contractCostParams = func() *xdr.ContractCostParams { + var result xdr.ContractCostParams + + for i := 0; i < 23; i++ { + result = append(result, xdr.ContractCostParamEntry{ + Ext: xdr.ExtensionPoint{}, + ConstTerm: 0, + LinearTerm: 0, + }) + } + + return &result +}() + +var mockLedgerEntriesWithoutTTLs = []xdr.LedgerEntry{ + { + LastModifiedLedgerSeq: 1, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &mockContractID, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvLedgerKeyContractInstance, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvContractInstance, + Instance: &xdr.ScContractInstance{ + Executable: xdr.ContractExecutable{ + Type: xdr.ContractExecutableTypeContractExecutableWasm, + WasmHash: &mockContractHash, + }, + Storage: nil, + }, + }, + }, + }, + }, + { + LastModifiedLedgerSeq: 2, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeContractCode, + ContractCode: &xdr.ContractCodeEntry{ + Hash: mockContractHash, + Code: helloWorldContract, + }, + }, + }, + { + LastModifiedLedgerSeq: 2, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeConfigSetting, + ConfigSetting: &xdr.ConfigSettingEntry{ + ConfigSettingId: xdr.ConfigSettingIdConfigSettingContractComputeV0, + ContractCompute: &xdr.ConfigSettingContractComputeV0{ + LedgerMaxInstructions: 100000000, + TxMaxInstructions: 100000000, + FeeRatePerInstructionsIncrement: 1, + TxMemoryLimit: 100000000, + }, + }, + }, + }, + { + LastModifiedLedgerSeq: 2, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeConfigSetting, + ConfigSetting: &xdr.ConfigSettingEntry{ + ConfigSettingId: xdr.ConfigSettingIdConfigSettingContractLedgerCostV0, + ContractLedgerCost: &xdr.ConfigSettingContractLedgerCostV0{ + LedgerMaxReadLedgerEntries: 100, + LedgerMaxReadBytes: 100, + LedgerMaxWriteLedgerEntries: 100, + LedgerMaxWriteBytes: 100, + TxMaxReadLedgerEntries: 100, + TxMaxReadBytes: 100, + TxMaxWriteLedgerEntries: 100, + TxMaxWriteBytes: 100, + FeeReadLedgerEntry: 100, + FeeWriteLedgerEntry: 100, + FeeRead1Kb: 100, + BucketListTargetSizeBytes: 100, + WriteFee1KbBucketListLow: 1, + WriteFee1KbBucketListHigh: 1, + BucketListWriteFeeGrowthFactor: 1, + }, + }, + }, + }, + { + LastModifiedLedgerSeq: 2, + Data: xdr.LedgerEntryData{ + Type: 
xdr.LedgerEntryTypeConfigSetting, + ConfigSetting: &xdr.ConfigSettingEntry{ + ConfigSettingId: xdr.ConfigSettingIdConfigSettingContractHistoricalDataV0, + ContractHistoricalData: &xdr.ConfigSettingContractHistoricalDataV0{ + FeeHistorical1Kb: 100, + }, + }, + }, + }, + { + LastModifiedLedgerSeq: 2, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeConfigSetting, + ConfigSetting: &xdr.ConfigSettingEntry{ + ConfigSettingId: xdr.ConfigSettingIdConfigSettingContractEventsV0, + ContractEvents: &xdr.ConfigSettingContractEventsV0{ + TxMaxContractEventsSizeBytes: 10000, + FeeContractEvents1Kb: 1, + }, + }, + }, + }, + { + LastModifiedLedgerSeq: 2, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeConfigSetting, + ConfigSetting: &xdr.ConfigSettingEntry{ + ConfigSettingId: xdr.ConfigSettingIdConfigSettingContractBandwidthV0, + ContractBandwidth: &xdr.ConfigSettingContractBandwidthV0{ + LedgerMaxTxsSizeBytes: 100000, + TxMaxSizeBytes: 1000, + FeeTxSize1Kb: 1, + }, + }, + }, + }, + { + LastModifiedLedgerSeq: 2, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeConfigSetting, + ConfigSetting: &xdr.ConfigSettingEntry{ + ConfigSettingId: xdr.ConfigSettingIdConfigSettingStateArchival, + StateArchivalSettings: &xdr.StateArchivalSettings{ + MaxEntryTtl: 100, + MinTemporaryTtl: 100, + MinPersistentTtl: 100, + PersistentRentRateDenominator: 100, + TempRentRateDenominator: 100, + MaxEntriesToArchive: 100, + BucketListSizeWindowSampleSize: 100, + EvictionScanSize: 100, + }, + }, + }, + }, + { + LastModifiedLedgerSeq: 2, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeConfigSetting, + ConfigSetting: &xdr.ConfigSettingEntry{ + ConfigSettingId: xdr.ConfigSettingIdConfigSettingContractCostParamsCpuInstructions, + // Obtained with TestGetLedgerEntryConfigSettings + ContractCostParamsCpuInsns: contractCostParams, + }, + }, + }, + { + LastModifiedLedgerSeq: 2, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeConfigSetting, + ConfigSetting: &xdr.ConfigSettingEntry{ + ConfigSettingId: xdr.ConfigSettingIdConfigSettingContractCostParamsMemoryBytes, + // Obtained with TestGetLedgerEntryConfigSettings + ContractCostParamsMemBytes: contractCostParams, + }, + }, + }, +} + +// Adds ttl entries to mockLedgerEntriesWithoutTTLs +var mockLedgerEntries = func() []xdr.LedgerEntry { + result := make([]xdr.LedgerEntry, 0, len(mockLedgerEntriesWithoutTTLs)) + for _, entry := range mockLedgerEntriesWithoutTTLs { + result = append(result, entry) + + if entry.Data.Type == xdr.LedgerEntryTypeContractData || entry.Data.Type == xdr.LedgerEntryTypeContractCode { + key, err := entry.LedgerKey() + if err != nil { + panic(err) + } + bin, err := key.MarshalBinary() + if err != nil { + panic(err) + } + ttlEntry := xdr.LedgerEntry{ + LastModifiedLedgerSeq: entry.LastModifiedLedgerSeq, + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeTtl, + Ttl: &xdr.TtlEntry{ + KeyHash: sha256.Sum256(bin), + // Make sure it doesn't ttl + LiveUntilLedgerSeq: 1000, + }, + }, + } + result = append(result, ttlEntry) + } + } + return result +}() + +var helloWorldContract = func() []byte { + _, filename, _, _ := runtime.Caller(0) + testDirName := path.Dir(filename) + contractFile := path.Join(testDirName, "../../../../target/wasm32-unknown-unknown/test-wasms/test_hello_world.wasm") + ret, err := os.ReadFile(contractFile) + if err != nil { + log.Fatalf("unable to read test_hello_world.wasm (%v) please run `make build-test-wasms` at the project root directory", err) + } + return ret +}() + +type inMemoryLedgerEntryReadTx 
map[string]xdr.LedgerEntry + +func (m inMemoryLedgerEntryReadTx) GetLedgerEntries(keys ...xdr.LedgerKey) ([]db.LedgerKeyAndEntry, error) { + result := make([]db.LedgerKeyAndEntry, 0, len(keys)) + for _, key := range keys { + serializedKey, err := key.MarshalBinaryBase64() + if err != nil { + return nil, err + } + entry, ok := m[serializedKey] + if !ok { + continue + } + // We don't check the TTL but that's ok for the test + result = append(result, db.LedgerKeyAndEntry{ + Key: key, + Entry: entry, + }) + } + return result, nil +} + +func newInMemoryLedgerEntryReadTx(entries []xdr.LedgerEntry) (inMemoryLedgerEntryReadTx, error) { + result := make(map[string]xdr.LedgerEntry, len(entries)) + for _, entry := range entries { + key, err := entry.LedgerKey() + if err != nil { + return inMemoryLedgerEntryReadTx{}, err + } + serialized, err := key.MarshalBinaryBase64() + if err != nil { + return inMemoryLedgerEntryReadTx{}, err + } + result[serialized] = entry + } + return result, nil +} + +func (m inMemoryLedgerEntryReadTx) GetLatestLedgerSequence() (uint32, error) { + return 2, nil +} + +func (m inMemoryLedgerEntryReadTx) Done() error { + return nil +} + +func getDB(t testing.TB, restartDB bool) *db.DB { + dbPath := path.Join(t.TempDir(), "soroban_rpc.sqlite") + dbInstance, err := db.OpenSQLiteDB(dbPath) + require.NoError(t, err) + readWriter := db.NewReadWriter(dbInstance, 100, 10000) + tx, err := readWriter.NewTx(context.Background()) + require.NoError(t, err) + for _, e := range mockLedgerEntries { + err := tx.LedgerEntryWriter().UpsertLedgerEntry(e) + require.NoError(t, err) + } + err = tx.Commit(2) + require.NoError(t, err) + if restartDB { + // Restarting the DB resets the ledger entries write-through cache + require.NoError(t, dbInstance.Close()) + dbInstance, err = db.OpenSQLiteDB(dbPath) + require.NoError(t, err) + } + return dbInstance +} + +type preflightParametersDBConfig struct { + dbInstance *db.DB + disableCache bool +} + +func getPreflightParameters(t testing.TB, dbConfig *preflightParametersDBConfig) PreflightParameters { + var ledgerEntryReadTx db.LedgerEntryReadTx + if dbConfig != nil { + entryReader := db.NewLedgerEntryReader(dbConfig.dbInstance) + var err error + if dbConfig.disableCache { + ledgerEntryReadTx, err = entryReader.NewTx(context.Background()) + } else { + ledgerEntryReadTx, err = entryReader.NewCachedTx(context.Background()) + } + require.NoError(t, err) + } else { + var err error + ledgerEntryReadTx, err = newInMemoryLedgerEntryReadTx(mockLedgerEntries) + require.NoError(t, err) + } + argSymbol := xdr.ScSymbol("world") + params := PreflightParameters{ + EnableDebug: true, + Logger: log.New(), + SourceAccount: xdr.MustAddress("GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H"), + OpBody: xdr.OperationBody{Type: xdr.OperationTypeInvokeHostFunction, + InvokeHostFunctionOp: &xdr.InvokeHostFunctionOp{ + HostFunction: xdr.HostFunction{ + Type: xdr.HostFunctionTypeHostFunctionTypeInvokeContract, + InvokeContract: &xdr.InvokeContractArgs{ + ContractAddress: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &mockContractID, + }, + FunctionName: "hello", + Args: []xdr.ScVal{ + { + Type: xdr.ScValTypeScvSymbol, + Sym: &argSymbol, + }, + }, + }, + }, + }}, + NetworkPassphrase: "foo", + LedgerEntryReadTx: ledgerEntryReadTx, + BucketListSize: 200, + } + return params +} + +func TestGetPreflight(t *testing.T) { + // in-memory + params := getPreflightParameters(t, nil) + result, err := GetPreflight(context.Background(), params) + 
require.NoError(t, err) + require.Empty(t, result.Error) + require.NoError(t, params.LedgerEntryReadTx.Done()) + + // using a restarted db with caching and + getDB(t, true) + dbConfig := &preflightParametersDBConfig{ + dbInstance: getDB(t, true), + disableCache: false, + } + params = getPreflightParameters(t, dbConfig) + result, err = GetPreflight(context.Background(), params) + require.NoError(t, err) + require.Empty(t, result.Error) + require.NoError(t, params.LedgerEntryReadTx.Done()) + require.NoError(t, dbConfig.dbInstance.Close()) +} + +func TestGetPreflightDebug(t *testing.T) { + params := getPreflightParameters(t, nil) + // Cause an error + params.OpBody.InvokeHostFunctionOp.HostFunction.InvokeContract.FunctionName = "bar" + + resultWithDebug, err := GetPreflight(context.Background(), params) + require.NoError(t, err) + require.NotZero(t, resultWithDebug.Error) + require.Contains(t, resultWithDebug.Error, "Backtrace") + require.Contains(t, resultWithDebug.Error, "Event log") + require.NotContains(t, resultWithDebug.Error, "DebugInfo not available") + + // Disable debug + params.EnableDebug = false + resultWithoutDebug, err := GetPreflight(context.Background(), params) + require.NoError(t, err) + require.NotZero(t, resultWithoutDebug.Error) + require.NotContains(t, resultWithoutDebug.Error, "Backtrace") + require.NotContains(t, resultWithoutDebug.Error, "Event log") + require.Contains(t, resultWithoutDebug.Error, "DebugInfo not available") +} + +type benchmarkDBConfig struct { + restart bool + disableCache bool +} + +type benchmarkConfig struct { + useDB *benchmarkDBConfig +} + +func benchmark(b *testing.B, config benchmarkConfig) { + var dbConfig *preflightParametersDBConfig + if config.useDB != nil { + dbConfig = &preflightParametersDBConfig{ + dbInstance: getDB(b, config.useDB.restart), + disableCache: config.useDB.disableCache, + } + } + + b.ResetTimer() + b.StopTimer() + for i := 0; i < b.N; i++ { + params := getPreflightParameters(b, dbConfig) + b.StartTimer() + result, err := GetPreflight(context.Background(), params) + b.StopTimer() + require.NoError(b, err) + require.Empty(b, result.Error) + require.NoError(b, params.LedgerEntryReadTx.Done()) + } + if dbConfig != nil { + require.NoError(b, dbConfig.dbInstance.Close()) + } +} + +func BenchmarkGetPreflight(b *testing.B) { + b.Run("In-memory storage", func(b *testing.B) { benchmark(b, benchmarkConfig{}) }) + b.Run("DB storage", func(b *testing.B) { benchmark(b, benchmarkConfig{useDB: &benchmarkDBConfig{}}) }) + b.Run("DB storage, restarting", func(b *testing.B) { benchmark(b, benchmarkConfig{useDB: &benchmarkDBConfig{restart: true}}) }) + b.Run("DB storage, no cache", func(b *testing.B) { benchmark(b, benchmarkConfig{useDB: &benchmarkDBConfig{disableCache: true}}) }) +} diff --git a/cmd/soroban-rpc/internal/test/captive-core-integration-tests.cfg b/cmd/soroban-rpc/internal/test/captive-core-integration-tests.cfg new file mode 100644 index 00000000..275599ba --- /dev/null +++ b/cmd/soroban-rpc/internal/test/captive-core-integration-tests.cfg @@ -0,0 +1,19 @@ +PEER_PORT=11725 +ARTIFICIALLY_ACCELERATE_TIME_FOR_TESTING=true + +UNSAFE_QUORUM=true +FAILURE_SAFETY=0 + +ENABLE_SOROBAN_DIAGNOSTIC_EVENTS=true +# Lower the TTL of persistent ledger entries +# so that ledger entry extension/restoring becomes testeable +TESTING_MINIMUM_PERSISTENT_ENTRY_LIFETIME=10 +TESTING_SOROBAN_HIGH_LIMIT_OVERRIDE=true + +[[VALIDATORS]] +NAME="local_core" +HOME_DOMAIN="core.local" +# From "SACJC372QBSSKJYTV5A7LWT4NXWHTQO6GHG4QDAVC2XDPX6CNNXFZ4JK" 
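+# (the public key below is the one derived from that secret seed)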
+PUBLIC_KEY="GD5KD2KEZJIGTC63IGW6UMUSMVUVG5IHG64HUTFWCHVZH2N2IBOQN7PS" +ADDRESS="localhost" +QUALITY="MEDIUM" diff --git a/cmd/soroban-rpc/internal/test/cli_test.go b/cmd/soroban-rpc/internal/test/cli_test.go new file mode 100644 index 00000000..997372ed --- /dev/null +++ b/cmd/soroban-rpc/internal/test/cli_test.go @@ -0,0 +1,383 @@ +package test + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "fmt" + "os" + "strconv" + "strings" + "testing" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/jhttp" + "github.com/google/shlex" + "github.com/stellar/go/keypair" + "github.com/stellar/go/strkey" + "github.com/stellar/go/txnbuild" + "github.com/stellar/go/xdr" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gotest.tools/v3/icmd" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/methods" +) + +func cargoTest(t *testing.T, name string) { + NewCLITest(t) + c := icmd.Command("cargo", "test", "--features", "integration", "--package", "soroban-test", "--test", "it", "--", name, "--exact", "--nocapture") + c.Env = append(os.Environ(), + fmt.Sprintf("SOROBAN_RPC_URL=http://localhost:%d/", sorobanRPCPort), + fmt.Sprintf("SOROBAN_NETWORK_PASSPHRASE=%s", StandaloneNetworkPassphrase), + ) + res := icmd.RunCmd(c) + require.NoError(t, res.Error, res.Stdout(), res.Stderr()) +} + +func TestCLICargoTest(t *testing.T) { + names := icmd.RunCmd(icmd.Command("cargo", "-q", "test", "integration::", "--package", "soroban-test", "--features", "integration", "--", "--list")) + input := names.Stdout() + lines := strings.Split(strings.TrimSpace(input), "\n") + for _, line := range lines { + testName := strings.TrimSuffix(line, ": test") + t.Run(testName, func(t *testing.T) { + cargoTest(t, testName) + }) + } +} + +func TestCLIWrapCustom(t *testing.T) { + it := NewCLITest(t) + assetCode := "deadbeef" + issuerAccount := getCLIDefaultAccount(t) + strkeyContractID := runSuccessfulCLICmd(t, fmt.Sprintf("contract asset deploy --asset=%s:%s", assetCode, issuerAccount)) + require.Equal(t, "true", runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id=%s -- authorized --id=%s", strkeyContractID, issuerAccount))) + asset := txnbuild.CreditAsset{ + Code: assetCode, + Issuer: issuerAccount, + } + establishAccountTrustline(t, it, it.MasterKey(), it.MasterAccount(), asset) + masterAccount := keypair.Root(StandaloneNetworkPassphrase).Address() + runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id=%s -- mint --to=%s --amount 1", strkeyContractID, masterAccount)) +} + +func TestCLIWrapNative(t *testing.T) { + NewCLITest(t) + testAccount := getCLIDefaultAccount(t) + strkeyContractID := runSuccessfulCLICmd(t, fmt.Sprintf("contract asset deploy --asset=native:%s", testAccount)) + require.Equal(t, "CAMTHSPKXZJIRTUXQP5QWJIFH3XIDMKLFAWVQOFOXPTKAW5GKV37ZC4N", strkeyContractID) + require.Equal(t, "true", runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id=%s -- authorized --id=%s", strkeyContractID, testAccount))) + require.Equal(t, "\"9223372036854775807\"", runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id=%s -- balance --id %s", strkeyContractID, testAccount))) +} + +func TestCLIContractInstall(t *testing.T) { + NewCLITest(t) + output := runSuccessfulCLICmd(t, fmt.Sprintf("contract install --wasm %s --ignore-checks", helloWorldContractPath)) + wasm := getHelloWorldContract(t) + contractHash := xdr.Hash(sha256.Sum256(wasm)) + require.Contains(t, output, contractHash.HexString()) +} + +func TestCLIContractInstallAndDeploy(t *testing.T) { + 
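+	// install the contract code first, then deploy it by wasm hash and check that the output ends with a contract ID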
NewCLITest(t) + runSuccessfulCLICmd(t, fmt.Sprintf("contract install --wasm %s --ignore-checks", helloWorldContractPath)) + wasm := getHelloWorldContract(t) + contractHash := xdr.Hash(sha256.Sum256(wasm)) + output := runSuccessfulCLICmd(t, fmt.Sprintf("contract deploy --salt %s --wasm-hash %s --ignore-checks", hex.EncodeToString(testSalt[:]), contractHash.HexString())) + outputsContractIDInLastLine(t, output) +} + +func TestCLIContractDeploy(t *testing.T) { + NewCLITest(t) + output := runSuccessfulCLICmd(t, fmt.Sprintf("contract deploy --salt %s --wasm %s --ignore-checks", hex.EncodeToString(testSalt[:]), helloWorldContractPath)) + outputsContractIDInLastLine(t, output) +} + +func outputsContractIDInLastLine(t *testing.T, output string) { + lines := strings.Split(output, "\n") + nonEmptyLines := make([]string, 0, len(lines)) + for _, l := range lines { + if l != "" { + nonEmptyLines = append(nonEmptyLines, l) + } + } + require.GreaterOrEqual(t, len(nonEmptyLines), 1) + contractID := nonEmptyLines[len(nonEmptyLines)-1] + require.Len(t, contractID, 56) + require.Regexp(t, "^C", contractID) +} + +func TestCLIContractDeployAndInvoke(t *testing.T) { + NewCLITest(t) + contractID := runSuccessfulCLICmd(t, fmt.Sprintf("contract deploy --salt=%s --wasm %s --ignore-checks", hex.EncodeToString(testSalt[:]), helloWorldContractPath)) + output := runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id %s -- hello --world=world", contractID)) + require.Contains(t, output, `["Hello","world"]`) +} + +func TestCLIRestorePreamble(t *testing.T) { + test := NewCLITest(t) + strkeyContractID := runSuccessfulCLICmd(t, fmt.Sprintf("contract deploy --salt=%s --wasm %s --ignore-checks", hex.EncodeToString(testSalt[:]), helloWorldContractPath)) + count := runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id %s -- inc", strkeyContractID)) + require.Equal(t, "1", count) + count = runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id %s -- inc", strkeyContractID)) + require.Equal(t, "2", count) + + // Wait for the counter ledger entry to ttl and successfully invoke the `inc` contract function again + // This ensures that the CLI restores the entry (using the RestorePreamble in the simulateTransaction response) + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + waitUntilLedgerEntryTTL(t, client, getCounterLedgerKey(parseContractStrKey(t, strkeyContractID))) + + count = runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id %s -- inc", strkeyContractID)) + require.Equal(t, "3", count) +} + +func TestCLIExtend(t *testing.T) { + test := NewCLITest(t) + strkeyContractID := runSuccessfulCLICmd(t, fmt.Sprintf("contract deploy --salt=%s --wasm %s --ignore-checks", hex.EncodeToString(testSalt[:]), helloWorldContractPath)) + count := runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id %s -- inc", strkeyContractID)) + require.Equal(t, "1", count) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + ttlKey := getCounterLedgerKey(parseContractStrKey(t, strkeyContractID)) + initialLiveUntilSeq := getLedgerEntryLiveUntil(t, client, ttlKey) + + extendOutput := runSuccessfulCLICmd( + t, + fmt.Sprintf( + "contract extend --id %s --key COUNTER --durability persistent --ledgers-to-extend 20", + strkeyContractID, + ), + ) + + newLiveUntilSeq := getLedgerEntryLiveUntil(t, client, ttlKey) + assert.Greater(t, newLiveUntilSeq, initialLiveUntilSeq) + assert.Equal(t, fmt.Sprintf("New ttl ledger: %d", newLiveUntilSeq), extendOutput) +} +func 
TestCLIExtendTooLow(t *testing.T) { + test := NewCLITest(t) + strkeyContractID := runSuccessfulCLICmd(t, fmt.Sprintf("contract deploy --salt=%s --wasm %s --ignore-checks", hex.EncodeToString(testSalt[:]), helloWorldContractPath)) + count := runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id %s -- inc", strkeyContractID)) + require.Equal(t, "1", count) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + ttlKey := getCounterLedgerKey(parseContractStrKey(t, strkeyContractID)) + initialLiveUntilSeq := parseInt(t, getLedgerEntryLiveUntil(t, client, ttlKey).GoString()) + + extendOutput := extend(t, strkeyContractID, "400", "--key COUNTER ") + + newLiveUntilSeq := parseInt(t, getLedgerEntryLiveUntil(t, client, ttlKey).GoString()) + assert.Greater(t, newLiveUntilSeq, initialLiveUntilSeq) + assert.Equal(t, newLiveUntilSeq, extendOutput) + + updatedLiveUntilSeq := extend(t, strkeyContractID, "15", "--key COUNTER") + assert.Equal(t, extendOutput, updatedLiveUntilSeq) +} + +func TestCLIExtendTooHigh(t *testing.T) { + test := NewCLITest(t) + strkeyContractID := runSuccessfulCLICmd(t, fmt.Sprintf("contract deploy --salt=%s --wasm %s --ignore-checks", hex.EncodeToString(testSalt[:]), helloWorldContractPath)) + count := runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id %s -- inc", strkeyContractID)) + require.Equal(t, "1", count) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + ttlKey := getCounterLedgerKey(parseContractStrKey(t, strkeyContractID)) + initialLiveUntilSeq := parseInt(t, getLedgerEntryLiveUntil(t, client, ttlKey).GoString()) + + extendOutput := extend(t, strkeyContractID, "100000000", "--key COUNTER ") + + newLiveUntilSeq := parseInt(t, getLedgerEntryLiveUntil(t, client, ttlKey).GoString()) + assert.Greater(t, newLiveUntilSeq, initialLiveUntilSeq) + assert.Equal(t, newLiveUntilSeq, extendOutput) +} + +func TestCLIRestore(t *testing.T) { + test := NewCLITest(t) + strkeyContractID := runSuccessfulCLICmd(t, fmt.Sprintf("contract deploy --salt=%s --wasm %s --ignore-checks", hex.EncodeToString(testSalt[:]), helloWorldContractPath)) + count := runSuccessfulCLICmd(t, fmt.Sprintf("contract invoke --id %s -- inc", strkeyContractID)) + require.Equal(t, "1", count) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + ttlKey := getCounterLedgerKey(parseContractStrKey(t, strkeyContractID)) + initialLiveUntilSeq := getLedgerEntryLiveUntil(t, client, ttlKey) + // Wait for the counter ledger entry to ttl and successfully invoke the `inc` contract function again + // This ensures that the CLI restores the entry (using the RestorePreamble in the simulateTransaction response) + waitUntilLedgerEntryTTL(t, client, ttlKey) + + restoreOutput := runSuccessfulCLICmd( + t, + fmt.Sprintf( + "contract restore --id %s --key COUNTER --durability persistent", + strkeyContractID, + ), + ) + + newLiveUntilSeq := getLedgerEntryLiveUntil(t, client, getCounterLedgerKey(parseContractStrKey(t, strkeyContractID))) + assert.Greater(t, newLiveUntilSeq, initialLiveUntilSeq) + assert.Equal(t, fmt.Sprintf("New ttl ledger: %d", newLiveUntilSeq), restoreOutput) + + // FIXME: the following checks shouldn't live here: + + // test to see that we get an error when requesting the ttl ledger entry explicitly. 
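+	// TTL entries are internal bookkeeping, so both getLedgerEntry and getLedgerEntries should reject direct queries for them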
+ ledgerTTLEntry := getTtlKey(t, getCounterLedgerKey(parseContractStrKey(t, strkeyContractID))) + ledgerTTLEntryB64, err := xdr.MarshalBase64(ledgerTTLEntry) + require.NoError(t, err) + var getLedgerEntryResult methods.GetLedgerEntryResponse + err = client.CallResult(context.Background(), "getLedgerEntry", methods.GetLedgerEntryRequest{ + Key: ledgerTTLEntryB64, + }, &getLedgerEntryResult) + require.Error(t, err) + require.Contains(t, err.Error(), methods.ErrLedgerTtlEntriesCannotBeQueriedDirectly) + + // repeat with getLedgerEntries + var getLedgerEntriesResult methods.GetLedgerEntriesResponse + err = client.CallResult(context.Background(), "getLedgerEntries", methods.GetLedgerEntriesRequest{ + Keys: []string{ledgerTTLEntryB64}, + }, &getLedgerEntriesResult) + require.Error(t, err) + require.Contains(t, err.Error(), methods.ErrLedgerTtlEntriesCannotBeQueriedDirectly) +} + +func getTtlKey(t *testing.T, key xdr.LedgerKey) xdr.LedgerKey { + assert.True(t, key.Type == xdr.LedgerEntryTypeContractCode || key.Type == xdr.LedgerEntryTypeContractData) + binKey, err := key.MarshalBinary() + assert.NoError(t, err) + return xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeTtl, + Ttl: &xdr.LedgerKeyTtl{ + KeyHash: sha256.Sum256(binKey), + }, + } +} + +func parseContractStrKey(t *testing.T, strkeyContractID string) [32]byte { + contractIDBytes := strkey.MustDecode(strkey.VersionByteContract, strkeyContractID) + var contractID [32]byte + require.Len(t, contractIDBytes, len(contractID)) + copy(contractID[:], contractIDBytes) + return contractID +} + +func runSuccessfulCLICmd(t *testing.T, cmd string) string { + res := runCLICommand(t, cmd) + stdout, stderr := res.Stdout(), res.Stderr() + outputs := fmt.Sprintf("stderr:\n%s\nstdout:\n%s\n", stderr, stdout) + require.NoError(t, res.Error, outputs) + fmt.Print(outputs) + return strings.TrimSpace(stdout) +} + +func runCLICommand(t *testing.T, cmd string) *icmd.Result { + args := []string{"run", "-q", "--", "--vv"} + parsedArgs, err := shlex.Split(cmd) + require.NoError(t, err, cmd) + args = append(args, parsedArgs...) + c := icmd.Command("cargo", args...) 
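+	// point the CLI at the test RPC server and use the generated "test" identity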
+ c.Env = append(os.Environ(), + fmt.Sprintf("SOROBAN_RPC_URL=http://localhost:%d/", sorobanRPCPort), + fmt.Sprintf("SOROBAN_NETWORK_PASSPHRASE=%s", StandaloneNetworkPassphrase), + "SOROBAN_ACCOUNT=test", + ) + return icmd.RunCmd(c) +} + +func getCLIDefaultAccount(t *testing.T) string { + runSuccessfulCLICmd(t, "keys generate -d test --no-fund") + return "GDIY6AQQ75WMD4W46EYB7O6UYMHOCGQHLAQGQTKHDX4J2DYQCHVCR4W4" +} + +func NewCLITest(t *testing.T) *Test { + test := NewTest(t) + fundAccount(t, test, getCLIDefaultAccount(t), "1000000") + return test +} + +func fundAccount(t *testing.T, test *Test, account string, amount string) { + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + tx, err := txnbuild.NewTransaction(txnbuild.TransactionParams{ + SourceAccount: test.MasterAccount(), + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{&txnbuild.CreateAccount{ + Destination: account, + Amount: amount, + }}, + BaseFee: txnbuild.MinBaseFee, + Memo: nil, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + require.NoError(t, err) + sendSuccessfulTransaction(t, client, test.MasterKey(), tx) +} + +func establishAccountTrustline(t *testing.T, test *Test, kp *keypair.Full, account txnbuild.Account, asset txnbuild.Asset) { + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + line := asset.MustToChangeTrustAsset() + tx, err := txnbuild.NewTransaction(txnbuild.TransactionParams{ + SourceAccount: account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{&txnbuild.ChangeTrust{ + Line: line, + Limit: "2000", + }}, + BaseFee: txnbuild.MinBaseFee, + Memo: nil, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + require.NoError(t, err) + sendSuccessfulTransaction(t, client, kp, tx) +} + +func parseInt(t *testing.T, s string) uint64 { + i, err := strconv.ParseUint(strings.TrimSpace(s), 10, 64) + require.NoError(t, err) + return i +} + +func extend(t *testing.T, contractId string, amount string, rest string) uint64 { + + res := runSuccessfulCLICmd( + t, + fmt.Sprintf( + "contract extend --ttl-ledger-only --id=%s --durability persistent --ledgers-to-extend=%s %s", + contractId, + amount, + rest, + ), + ) + + return parseInt(t, res) +} + +func getLedgerEntryLiveUntil(t *testing.T, client *jrpc2.Client, ttlLedgerKey xdr.LedgerKey) xdr.Uint32 { + keyB64, err := xdr.MarshalBase64(ttlLedgerKey) + require.NoError(t, err) + getLedgerEntryrequest := methods.GetLedgerEntryRequest{ + Key: keyB64, + } + var getLedgerEntryResult methods.GetLedgerEntryResponse + err = client.CallResult(context.Background(), "getLedgerEntry", getLedgerEntryrequest, &getLedgerEntryResult) + require.NoError(t, err) + var entry xdr.LedgerEntryData + require.NoError(t, xdr.SafeUnmarshalBase64(getLedgerEntryResult.XDR, &entry)) + + require.Contains(t, []xdr.LedgerEntryType{xdr.LedgerEntryTypeContractCode, xdr.LedgerEntryTypeContractData}, entry.Type) + require.NotNil(t, getLedgerEntryResult.LiveUntilLedgerSeq) + return xdr.Uint32(*getLedgerEntryResult.LiveUntilLedgerSeq) +} diff --git a/cmd/soroban-rpc/internal/test/core-start.sh b/cmd/soroban-rpc/internal/test/core-start.sh new file mode 100755 index 00000000..9dd89ba6 --- /dev/null +++ b/cmd/soroban-rpc/internal/test/core-start.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +set -e +set -x + +source /etc/profile +# work within the current docker working dir +if [ ! 
-f "./stellar-core.cfg" ]; then + cp /stellar-core.cfg ./ +fi + +echo "using config:" +cat stellar-core.cfg + +# initialize new db +stellar-core new-db + +if [ "$1" = "standalone" ]; then + # initialize for new history archive path, remove any pre-existing on same path from base image + rm -rf ./history + stellar-core new-hist vs + + # serve history archives to horizon on port 1570 + pushd ./history/vs/ + python3 -m http.server 1570 & + popd +fi + +exec stellar-core run --console diff --git a/cmd/soroban-rpc/internal/test/cors_test.go b/cmd/soroban-rpc/internal/test/cors_test.go new file mode 100644 index 00000000..2e0cdb3e --- /dev/null +++ b/cmd/soroban-rpc/internal/test/cors_test.go @@ -0,0 +1,32 @@ +package test + +import ( + "bytes" + "io" + "net/http" + "testing" + + "github.com/stretchr/testify/require" +) + +// TestCORS ensures that we receive the correct CORS headers as a response to an HTTP request. +// Specifically, when we include an Origin header in the request, a soroban-rpc should response +// with a corresponding Access-Control-Allow-Origin. +func TestCORS(t *testing.T) { + test := NewTest(t) + + request, err := http.NewRequest("POST", test.sorobanRPCURL(), bytes.NewBufferString("{\"jsonrpc\": \"2.0\", \"id\": 1, \"method\": \"getHealth\"}")) + require.NoError(t, err) + request.Header.Set("Content-Type", "application/json") + origin := "testorigin.com" + request.Header.Set("Origin", origin) + + var client http.Client + response, err := client.Do(request) + require.NoError(t, err) + _, err = io.ReadAll(response.Body) + require.NoError(t, err) + + accessControl := response.Header.Get("Access-Control-Allow-Origin") + require.Equal(t, origin, accessControl) +} diff --git a/cmd/soroban-rpc/internal/test/docker-compose.yml b/cmd/soroban-rpc/internal/test/docker-compose.yml new file mode 100644 index 00000000..b7309cdc --- /dev/null +++ b/cmd/soroban-rpc/internal/test/docker-compose.yml @@ -0,0 +1,35 @@ +version: '3' +services: + core-postgres: + image: postgres:9.6.17-alpine + restart: on-failure + environment: + - POSTGRES_PASSWORD=mysecretpassword + - POSTGRES_DB=stellar + expose: + - "5641" + command: [ "-p", "5641" ] + + core: + platform: linux/amd64 + # Note: Please keep the image pinned to an immutable tag matching the Captive Core version. + # This avoids implicit updates which break compatibility between + # the Core container and captive core. 
+ image: ${CORE_IMAGE:-stellar/unsafe-stellar-core:20.1.0-1656.114b833e7.focal} + depends_on: + - core-postgres + restart: on-failure + environment: + - TRACY_NO_INVARIANT_CHECK=1 + ports: + - "11625:11625" + - "11626:11626" + # add extra port for history archive server + - "1570:1570" + entrypoint: /usr/bin/env + command: /start standalone + volumes: + - ./stellar-core-integration-tests.cfg:/stellar-core.cfg + - ./core-start.sh:/start + extra_hosts: + - "host.docker.internal:host-gateway" diff --git a/cmd/soroban-rpc/internal/test/get_ledger_entries_test.go b/cmd/soroban-rpc/internal/test/get_ledger_entries_test.go new file mode 100644 index 00000000..46b0b25d --- /dev/null +++ b/cmd/soroban-rpc/internal/test/get_ledger_entries_test.go @@ -0,0 +1,179 @@ +package test + +import ( + "context" + "crypto/sha256" + "testing" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/jhttp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/stellar/go/keypair" + "github.com/stellar/go/txnbuild" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/methods" +) + +func TestGetLedgerEntriesNotFound(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase).Address() + contractID := getContractID(t, sourceAccount, testSalt, StandaloneNetworkPassphrase) + contractIDHash := xdr.Hash(contractID) + keyB64, err := xdr.MarshalBase64(xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.LedgerKeyContractData{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractIDHash, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvLedgerKeyContractInstance, + }, + Durability: xdr.ContractDataDurabilityPersistent, + }, + }) + require.NoError(t, err) + + var keys []string + keys = append(keys, keyB64) + request := methods.GetLedgerEntriesRequest{ + Keys: keys, + } + + var result methods.GetLedgerEntriesResponse + err = client.CallResult(context.Background(), "getLedgerEntries", request, &result) + require.NoError(t, err) + + assert.Equal(t, 0, len(result.Entries)) + assert.Greater(t, result.LatestLedger, uint32(0)) +} + +func TestGetLedgerEntriesInvalidParams(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + var keys []string + keys = append(keys, "<>@@#$") + request := methods.GetLedgerEntriesRequest{ + Keys: keys, + } + + var result methods.GetLedgerEntriesResponse + jsonRPCErr := client.CallResult(context.Background(), "getLedgerEntries", request, &result).(*jrpc2.Error) + assert.Contains(t, jsonRPCErr.Message, "cannot unmarshal key value") + assert.Equal(t, jrpc2.InvalidParams, jsonRPCErr.Code) +} + +func TestGetLedgerEntriesSucceeds(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase) + address := sourceAccount.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + contractBinary := getHelloWorldContract(t) + params := preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(account.AccountID, contractBinary), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: 
txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + tx, err := txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + params = preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createCreateContractOperation(address, contractBinary), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + tx, err = txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + contractID := getContractID(t, address, testSalt, StandaloneNetworkPassphrase) + + contractHash := sha256.Sum256(contractBinary) + contractCodeKeyB64, err := xdr.MarshalBase64(xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeContractCode, + ContractCode: &xdr.LedgerKeyContractCode{ + Hash: contractHash, + }, + }) + + // Doesn't exist. + notFoundKeyB64, err := xdr.MarshalBase64(getCounterLedgerKey(contractID)) + require.NoError(t, err) + + contractIDHash := xdr.Hash(contractID) + contractInstanceKeyB64, err := xdr.MarshalBase64(xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.LedgerKeyContractData{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractIDHash, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvLedgerKeyContractInstance, + }, + Durability: xdr.ContractDataDurabilityPersistent, + }, + }) + require.NoError(t, err) + + keys := []string{contractCodeKeyB64, notFoundKeyB64, contractInstanceKeyB64} + request := methods.GetLedgerEntriesRequest{ + Keys: keys, + } + + var result methods.GetLedgerEntriesResponse + err = client.CallResult(context.Background(), "getLedgerEntries", request, &result) + require.NoError(t, err) + require.Equal(t, 2, len(result.Entries)) + require.Greater(t, result.LatestLedger, uint32(0)) + + require.Greater(t, result.Entries[0].LastModifiedLedger, uint32(0)) + require.LessOrEqual(t, result.Entries[0].LastModifiedLedger, result.LatestLedger) + require.NotNil(t, result.Entries[0].LiveUntilLedgerSeq) + require.Greater(t, *result.Entries[0].LiveUntilLedgerSeq, result.LatestLedger) + require.Equal(t, contractCodeKeyB64, result.Entries[0].Key) + var firstEntry xdr.LedgerEntryData + require.NoError(t, xdr.SafeUnmarshalBase64(result.Entries[0].XDR, &firstEntry)) + require.Equal(t, xdr.LedgerEntryTypeContractCode, firstEntry.Type) + require.Equal(t, contractBinary, firstEntry.MustContractCode().Code) + + require.Greater(t, result.Entries[1].LastModifiedLedger, uint32(0)) + require.LessOrEqual(t, result.Entries[1].LastModifiedLedger, result.LatestLedger) + require.NotNil(t, result.Entries[1].LiveUntilLedgerSeq) + require.Greater(t, *result.Entries[1].LiveUntilLedgerSeq, result.LatestLedger) + require.Equal(t, contractInstanceKeyB64, result.Entries[1].Key) + var secondEntry xdr.LedgerEntryData + require.NoError(t, xdr.SafeUnmarshalBase64(result.Entries[1].XDR, &secondEntry)) + require.Equal(t, xdr.LedgerEntryTypeContractData, secondEntry.Type) + require.True(t, secondEntry.MustContractData().Key.Equals(xdr.ScVal{ + Type: xdr.ScValTypeScvLedgerKeyContractInstance, + })) +} diff --git a/cmd/soroban-rpc/internal/test/get_ledger_entry_test.go b/cmd/soroban-rpc/internal/test/get_ledger_entry_test.go new file mode 100644 index 00000000..dd4879d5 --- /dev/null +++ b/cmd/soroban-rpc/internal/test/get_ledger_entry_test.go @@ -0,0 
+1,115 @@ +package test + +import ( + "context" + "crypto/sha256" + "testing" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/jhttp" + "github.com/stellar/go/txnbuild" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/stellar/go/keypair" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/methods" +) + +func TestGetLedgerEntryNotFound(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase).Address() + contractID := getContractID(t, sourceAccount, testSalt, StandaloneNetworkPassphrase) + contractIDHash := xdr.Hash(contractID) + keyB64, err := xdr.MarshalBase64(xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.LedgerKeyContractData{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractIDHash, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvLedgerKeyContractInstance, + }, + Durability: xdr.ContractDataDurabilityPersistent, + }, + }) + require.NoError(t, err) + request := methods.GetLedgerEntryRequest{ + Key: keyB64, + } + + var result methods.GetLedgerEntryResponse + jsonRPCErr := client.CallResult(context.Background(), "getLedgerEntry", request, &result).(*jrpc2.Error) + assert.Contains(t, jsonRPCErr.Message, "not found") + assert.Equal(t, jrpc2.InvalidRequest, jsonRPCErr.Code) +} + +func TestGetLedgerEntryInvalidParams(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + request := methods.GetLedgerEntryRequest{ + Key: "<>@@#$", + } + + var result methods.GetLedgerEntryResponse + jsonRPCErr := client.CallResult(context.Background(), "getLedgerEntry", request, &result).(*jrpc2.Error) + assert.Equal(t, "cannot unmarshal key value", jsonRPCErr.Message) + assert.Equal(t, jrpc2.InvalidParams, jsonRPCErr.Code) +} + +func TestGetLedgerEntrySucceeds(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + kp := keypair.Root(StandaloneNetworkPassphrase) + account := txnbuild.NewSimpleAccount(kp.Address(), 0) + + contractBinary := getHelloWorldContract(t) + params := preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(account.AccountID, contractBinary), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + tx, err := txnbuild.NewTransaction(params) + assert.NoError(t, err) + + sendSuccessfulTransaction(t, client, kp, tx) + + contractHash := sha256.Sum256(contractBinary) + keyB64, err := xdr.MarshalBase64(xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeContractCode, + ContractCode: &xdr.LedgerKeyContractCode{ + Hash: contractHash, + }, + }) + require.NoError(t, err) + request := methods.GetLedgerEntryRequest{ + Key: keyB64, + } + + var result methods.GetLedgerEntryResponse + err = client.CallResult(context.Background(), "getLedgerEntry", request, &result) + assert.NoError(t, err) + assert.Greater(t, result.LatestLedger, uint32(0)) + assert.GreaterOrEqual(t, result.LatestLedger, result.LastModifiedLedger) + var entry xdr.LedgerEntryData + assert.NoError(t, xdr.SafeUnmarshalBase64(result.XDR, &entry)) + assert.Equal(t, 
contractBinary, entry.MustContractCode().Code) +} diff --git a/cmd/soroban-rpc/internal/test/get_network_test.go b/cmd/soroban-rpc/internal/test/get_network_test.go new file mode 100644 index 00000000..39805d86 --- /dev/null +++ b/cmd/soroban-rpc/internal/test/get_network_test.go @@ -0,0 +1,28 @@ +package test + +import ( + "context" + "testing" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/jhttp" + "github.com/stretchr/testify/assert" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/methods" +) + +func TestGetNetworkSucceeds(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + request := methods.GetNetworkRequest{} + + var result methods.GetNetworkResponse + err := client.CallResult(context.Background(), "getNetwork", request, &result) + assert.NoError(t, err) + assert.Equal(t, friendbotURL, result.FriendbotURL) + assert.Equal(t, StandaloneNetworkPassphrase, result.Passphrase) + assert.Equal(t, stellarCoreProtocolVersion, result.ProtocolVersion) +} diff --git a/cmd/soroban-rpc/internal/test/health_test.go b/cmd/soroban-rpc/internal/test/health_test.go new file mode 100644 index 00000000..4afbf7f8 --- /dev/null +++ b/cmd/soroban-rpc/internal/test/health_test.go @@ -0,0 +1,24 @@ +package test + +import ( + "context" + "testing" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/jhttp" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/methods" + "github.com/stretchr/testify/assert" +) + +func TestHealth(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + var result methods.HealthCheckResult + if err := client.CallResult(context.Background(), "getHealth", nil, &result); err != nil { + t.Fatalf("rpc call failed: %v", err) + } + assert.Equal(t, methods.HealthCheckResult{Status: "healthy"}, result) +} diff --git a/cmd/soroban-rpc/internal/test/integration.go b/cmd/soroban-rpc/internal/test/integration.go new file mode 100644 index 00000000..ea918d13 --- /dev/null +++ b/cmd/soroban-rpc/internal/test/integration.go @@ -0,0 +1,356 @@ +package test + +import ( + "context" + "fmt" + "os" + "os/exec" + "os/signal" + "path" + "path/filepath" + "strconv" + "sync" + "syscall" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/spf13/cobra" + "github.com/stellar/go/clients/stellarcore" + "github.com/stellar/go/keypair" + "github.com/stellar/go/txnbuild" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/db" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/ledgerbucketwindow" +) + +const ( + StandaloneNetworkPassphrase = "Standalone Network ; February 2017" + stellarCoreProtocolVersion = 20 + stellarCorePort = 11626 + goModFile = "go.mod" + goMonorepoGithubPath = "github.com/stellar/go" + friendbotURL = "http://localhost:8000/friendbot" + // Needed when Core is run with ARTIFICIALLY_ACCELERATE_TIME_FOR_TESTING=true + checkpointFrequency = 8 + sorobanRPCPort = 8000 + adminPort = 8080 + helloWorldContractPath = "../../../../target/wasm32-unknown-unknown/test-wasms/test_hello_world.wasm" +) + +type Test struct { + t *testing.T + + composePath string // docker compose yml file + + daemon *daemon.Daemon + + coreClient *stellarcore.Client + + masterAccount txnbuild.Account + shutdownOnce sync.Once + shutdownCalls 
[]func() +} + +func NewTest(t *testing.T) *Test { + if os.Getenv("SOROBAN_RPC_INTEGRATION_TESTS_ENABLED") == "" { + t.Skip("skipping integration test: SOROBAN_RPC_INTEGRATION_TESTS_ENABLED not set") + } + coreBinaryPath := os.Getenv("SOROBAN_RPC_INTEGRATION_TESTS_CAPTIVE_CORE_BIN") + if coreBinaryPath == "" { + t.Fatal("missing SOROBAN_RPC_INTEGRATION_TESTS_CAPTIVE_CORE_BIN") + } + + i := &Test{ + t: t, + composePath: findDockerComposePath(), + } + i.masterAccount = &txnbuild.SimpleAccount{ + AccountID: i.MasterKey().Address(), + Sequence: 0, + } + i.runComposeCommand("up", "--detach", "--quiet-pull", "--no-color") + i.prepareShutdownHandlers() + i.coreClient = &stellarcore.Client{URL: "http://localhost:" + strconv.Itoa(stellarCorePort)} + i.waitForCore() + i.waitForCheckpoint() + i.launchDaemon(coreBinaryPath) + + return i +} + +func (i *Test) MasterKey() *keypair.Full { + return keypair.Root(StandaloneNetworkPassphrase) +} + +func (i *Test) MasterAccount() txnbuild.Account { + return i.masterAccount +} + +func (i *Test) sorobanRPCURL() string { + return fmt.Sprintf("http://localhost:%d", sorobanRPCPort) +} + +func (i *Test) adminURL() string { + return fmt.Sprintf("http://localhost:%d", adminPort) +} + +func (i *Test) waitForCheckpoint() { + i.t.Log("Waiting for core to be up...") + for t := 30 * time.Second; t >= 0; t -= time.Second { + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + info, err := i.coreClient.Info(ctx) + cancel() + if err != nil { + i.t.Logf("could not obtain info response: %v", err) + time.Sleep(time.Second) + continue + } + if info.Info.Ledger.Num <= checkpointFrequency { + i.t.Logf("checkpoint not reached yet: %v", info) + time.Sleep(time.Second) + continue + } + return + } + i.t.Fatal("Core could not reach checkpoint ledger after 30s") +} + +func (i *Test) launchDaemon(coreBinaryPath string) { + var config config.Config + cmd := &cobra.Command{} + if err := config.AddFlags(cmd); err != nil { + i.t.FailNow() + } + if err := config.SetValues(func(string) (string, bool) { return "", false }); err != nil { + i.t.FailNow() + } + + config.Endpoint = fmt.Sprintf("localhost:%d", sorobanRPCPort) + config.AdminEndpoint = fmt.Sprintf("localhost:%d", adminPort) + config.StellarCoreURL = "http://localhost:" + strconv.Itoa(stellarCorePort) + config.CoreRequestTimeout = time.Second * 2 + config.StellarCoreBinaryPath = coreBinaryPath + config.CaptiveCoreConfigPath = path.Join(i.composePath, "captive-core-integration-tests.cfg") + config.CaptiveCoreStoragePath = i.t.TempDir() + config.CaptiveCoreHTTPPort = 0 + config.FriendbotURL = friendbotURL + config.NetworkPassphrase = StandaloneNetworkPassphrase + config.HistoryArchiveURLs = []string{"http://localhost:1570"} + config.LogLevel = logrus.DebugLevel + config.SQLiteDBPath = path.Join(i.t.TempDir(), "soroban_rpc.sqlite") + config.IngestionTimeout = 10 * time.Minute + config.EventLedgerRetentionWindow = ledgerbucketwindow.DefaultEventLedgerRetentionWindow + config.CheckpointFrequency = checkpointFrequency + config.MaxHealthyLedgerLatency = time.Second * 10 + config.PreflightEnableDebug = true + + i.daemon = daemon.MustNew(&config) + go i.daemon.Run() + + // wait for the storage to catch up for 1 minute + info, err := i.coreClient.Info(context.Background()) + if err != nil { + i.t.Fatalf("cannot obtain latest ledger from core: %v", err) + } + targetLedgerSequence := uint32(info.Info.Ledger.Num) + + reader := db.NewLedgerEntryReader(i.daemon.GetDB()) + success := false + for t := 30; t >= 0; t -= 1 { + sequence, 
err := reader.GetLatestLedgerSequence(context.Background()) + if err != nil { + if err != db.ErrEmptyDB { + i.t.Fatalf("cannot access ledger entry storage: %v", err) + } + } else { + if sequence >= targetLedgerSequence { + success = true + break + } + } + time.Sleep(time.Second) + } + if !success { + i.t.Fatalf("LedgerEntryStorage failed to sync in 1 minute") + } +} + +// Runs a docker-compose command applied to the above configs +func (i *Test) runComposeCommand(args ...string) { + integrationYaml := filepath.Join(i.composePath, "docker-compose.yml") + + cmdline := append([]string{"-f", integrationYaml}, args...) + cmd := exec.Command("docker-compose", cmdline...) + + i.t.Log("Running", cmd.Env, cmd.Args) + out, innerErr := cmd.Output() + if exitErr, ok := innerErr.(*exec.ExitError); ok { + fmt.Printf("stdout:\n%s\n", string(out)) + fmt.Printf("stderr:\n%s\n", string(exitErr.Stderr)) + } + + if innerErr != nil { + i.t.Fatalf("Compose command failed: %v", innerErr) + } +} + +func (i *Test) prepareShutdownHandlers() { + i.shutdownCalls = append(i.shutdownCalls, + func() { + if i.daemon != nil { + i.daemon.Close() + } + i.runComposeCommand("down", "-v") + }, + ) + + // Register cleanup handlers (on panic and ctrl+c) so the containers are + // stopped even if ingestion or testing fails. + i.t.Cleanup(i.Shutdown) + + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt, syscall.SIGTERM) + go func() { + <-c + i.Shutdown() + os.Exit(int(syscall.SIGTERM)) + }() +} + +// Shutdown stops the integration tests and destroys all its associated +// resources. It will be implicitly called when the calling test (i.e. the +// `testing.Test` passed to `New()`) is finished if it hasn't been explicitly +// called before. +func (i *Test) Shutdown() { + i.shutdownOnce.Do(func() { + // run them in the opposite order in which they where added + for callI := len(i.shutdownCalls) - 1; callI >= 0; callI-- { + i.shutdownCalls[callI]() + } + }) +} + +// Wait for core to be up and manually close the first ledger +func (i *Test) waitForCore() { + i.t.Log("Waiting for core to be up...") + for t := 30 * time.Second; t >= 0; t -= time.Second { + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + _, err := i.coreClient.Info(ctx) + cancel() + if err != nil { + i.t.Logf("could not obtain info response: %v", err) + time.Sleep(time.Second) + continue + } + break + } + + i.UpgradeProtocol(stellarCoreProtocolVersion) + + for t := 0; t < 5; t++ { + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + info, err := i.coreClient.Info(ctx) + cancel() + if err != nil || !info.IsSynced() { + i.t.Logf("Core is still not synced: %v %v", err, info) + time.Sleep(time.Second) + continue + } + i.t.Log("Core is up.") + return + } + i.t.Fatal("Core could not sync after 30s") +} + +// UpgradeProtocol arms Core with upgrade and blocks until protocol is upgraded. 
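+// It polls Core's info endpoint once per second for up to 10 seconds and fails the test if the reported ledger protocol version never reaches the requested one.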
+func (i *Test) UpgradeProtocol(version uint32) { + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + err := i.coreClient.Upgrade(ctx, int(version)) + cancel() + if err != nil { + i.t.Fatalf("could not upgrade protocol: %v", err) + } + + for t := 0; t < 10; t++ { + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + info, err := i.coreClient.Info(ctx) + cancel() + if err != nil { + i.t.Logf("could not obtain info response: %v", err) + time.Sleep(time.Second) + continue + } + + if info.Info.Ledger.Version == int(version) { + i.t.Logf("Protocol upgraded to: %d", info.Info.Ledger.Version) + return + } + time.Sleep(time.Second) + } + + i.t.Fatalf("could not upgrade protocol in 10s") +} + +// Cluttering code with if err != nil is absolute nonsense. +func panicIf(err error) { + if err != nil { + panic(err) + } +} + +// findProjectRoot iterates upward on the directory until go.mod file is found. +func findProjectRoot(current string) string { + // Lets you check if a particular directory contains a file. + directoryContainsFilename := func(dir string, filename string) bool { + files, innerErr := os.ReadDir(dir) + panicIf(innerErr) + + for _, file := range files { + if file.Name() == filename { + return true + } + } + return false + } + var err error + + // In either case, we try to walk up the tree until we find "go.mod", + // which we hope is the root directory of the project. + for !directoryContainsFilename(current, goModFile) { + current, err = filepath.Abs(filepath.Join(current, "..")) + + // FIXME: This only works on *nix-like systems. + if err != nil || filepath.Base(current)[0] == filepath.Separator { + fmt.Println("Failed to establish project root directory.") + panic(err) + } + } + return current +} + +// findDockerComposePath performs a best-effort attempt to find the project's +// Docker Compose files. +func findDockerComposePath() string { + current, err := os.Getwd() + panicIf(err) + + // + // We have a primary and backup attempt for finding the necessary docker + // files: via $GOPATH and via local directory traversal. 
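+ // The $GOPATH route assumes the repository is checked out under $GOPATH/src/github.com/stellar/soroban-tools; otherwise we walk up from the working directory until a go.mod file is found.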
+ // + + if gopath := os.Getenv("GOPATH"); gopath != "" { + monorepo := filepath.Join(gopath, "src", "github.com", "stellar", "soroban-tools") + if _, err = os.Stat(monorepo); !os.IsNotExist(err) { + current = monorepo + } + } + + current = findProjectRoot(current) + + // Directly jump down to the folder that should contain the configs + return filepath.Join(current, "cmd", "soroban-rpc", "internal", "test") +} diff --git a/cmd/soroban-rpc/internal/test/integration_test.go b/cmd/soroban-rpc/internal/test/integration_test.go new file mode 100644 index 00000000..684a61ad --- /dev/null +++ b/cmd/soroban-rpc/internal/test/integration_test.go @@ -0,0 +1,15 @@ +package test + +import ( + "fmt" + "testing" +) + +func TestFindDockerComposePath(t *testing.T) { + dockerPath := findDockerComposePath() + + if len(dockerPath) == 0 { + t.Fail() + } + fmt.Printf("docker compose path is %s\n", dockerPath) +} diff --git a/cmd/soroban-rpc/internal/test/metrics_test.go b/cmd/soroban-rpc/internal/test/metrics_test.go new file mode 100644 index 00000000..5608a2f9 --- /dev/null +++ b/cmd/soroban-rpc/internal/test/metrics_test.go @@ -0,0 +1,39 @@ +package test + +import ( + "fmt" + "io" + "net/http" + "net/url" + "runtime" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config" +) + +func TestMetrics(t *testing.T) { + test := NewTest(t) + metrics := getMetrics(test) + buildMetric := fmt.Sprintf( + "soroban_rpc_build_info{branch=\"%s\",build_timestamp=\"%s\",commit=\"%s\",goversion=\"%s\",version=\"%s\"} 1", + config.Branch, + config.BuildTimestamp, + config.CommitHash, + runtime.Version(), + config.Version, + ) + require.Contains(t, metrics, buildMetric) +} + +func getMetrics(test *Test) string { + metricsURL, err := url.JoinPath(test.adminURL(), "/metrics") + require.NoError(test.t, err) + response, err := http.Get(metricsURL) + require.NoError(test.t, err) + responseBytes, err := io.ReadAll(response.Body) + require.NoError(test.t, err) + require.NoError(test.t, response.Body.Close()) + return string(responseBytes) +} diff --git a/cmd/soroban-rpc/internal/test/simulate_transaction_test.go b/cmd/soroban-rpc/internal/test/simulate_transaction_test.go new file mode 100644 index 00000000..ec785050 --- /dev/null +++ b/cmd/soroban-rpc/internal/test/simulate_transaction_test.go @@ -0,0 +1,1136 @@ +package test + +import ( + "context" + "crypto/sha256" + "fmt" + "os" + "path" + "runtime" + "testing" + "time" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/jhttp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/stellar/go/keypair" + "github.com/stellar/go/txnbuild" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/methods" +) + +var ( + testSalt = sha256.Sum256([]byte("a1")) +) + +func getHelloWorldContract(t *testing.T) []byte { + _, filename, _, _ := runtime.Caller(0) + testDirName := path.Dir(filename) + contractFile := path.Join(testDirName, helloWorldContractPath) + ret, err := os.ReadFile(contractFile) + if err != nil { + t.Fatalf("unable to read test_hello_world.wasm (%v) please run `make build-test-wasms` at the project root directory", err) + } + return ret +} + +func createInvokeHostOperation(sourceAccount string, contractID xdr.Hash, method string, args ...xdr.ScVal) *txnbuild.InvokeHostFunction { + return &txnbuild.InvokeHostFunction{ + HostFunction: xdr.HostFunction{ + Type: xdr.HostFunctionTypeHostFunctionTypeInvokeContract, + 
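+ // An invoke-contract host function carries the target contract address, the exported function name, and its ScVal arguments.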
InvokeContract: &xdr.InvokeContractArgs{ + ContractAddress: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractID, + }, + FunctionName: xdr.ScSymbol(method), + Args: args, + }, + }, + Auth: nil, + SourceAccount: sourceAccount, + } +} + +func createInstallContractCodeOperation(sourceAccount string, contractCode []byte) *txnbuild.InvokeHostFunction { + return &txnbuild.InvokeHostFunction{ + HostFunction: xdr.HostFunction{ + Type: xdr.HostFunctionTypeHostFunctionTypeUploadContractWasm, + Wasm: &contractCode, + }, + SourceAccount: sourceAccount, + } +} + +func createCreateContractOperation(sourceAccount string, contractCode []byte) *txnbuild.InvokeHostFunction { + saltParam := xdr.Uint256(testSalt) + contractHash := xdr.Hash(sha256.Sum256(contractCode)) + + sourceAccountID := xdr.MustAddress(sourceAccount) + return &txnbuild.InvokeHostFunction{ + HostFunction: xdr.HostFunction{ + Type: xdr.HostFunctionTypeHostFunctionTypeCreateContract, + CreateContract: &xdr.CreateContractArgs{ + ContractIdPreimage: xdr.ContractIdPreimage{ + Type: xdr.ContractIdPreimageTypeContractIdPreimageFromAddress, + FromAddress: &xdr.ContractIdPreimageFromAddress{ + Address: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeAccount, + AccountId: &sourceAccountID, + }, + Salt: saltParam, + }, + }, + Executable: xdr.ContractExecutable{ + Type: xdr.ContractExecutableTypeContractExecutableWasm, + WasmHash: &contractHash, + }, + }, + }, + Auth: []xdr.SorobanAuthorizationEntry{}, + SourceAccount: sourceAccount, + } +} + +func getContractID(t *testing.T, sourceAccount string, salt [32]byte, networkPassphrase string) [32]byte { + sourceAccountID := xdr.MustAddress(sourceAccount) + preImage := xdr.HashIdPreimage{ + Type: xdr.EnvelopeTypeEnvelopeTypeContractId, + ContractId: &xdr.HashIdPreimageContractId{ + NetworkId: sha256.Sum256([]byte(networkPassphrase)), + ContractIdPreimage: xdr.ContractIdPreimage{ + Type: xdr.ContractIdPreimageTypeContractIdPreimageFromAddress, + FromAddress: &xdr.ContractIdPreimageFromAddress{ + Address: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeAccount, + AccountId: &sourceAccountID, + }, + Salt: salt, + }, + }, + }, + } + + xdrPreImageBytes, err := preImage.MarshalBinary() + require.NoError(t, err) + hashedContractID := sha256.Sum256(xdrPreImageBytes) + return hashedContractID +} + +func simulateTransactionFromTxParams(t *testing.T, client *jrpc2.Client, params txnbuild.TransactionParams) methods.SimulateTransactionResponse { + savedAutoIncrement := params.IncrementSequenceNum + params.IncrementSequenceNum = false + tx, err := txnbuild.NewTransaction(params) + assert.NoError(t, err) + params.IncrementSequenceNum = savedAutoIncrement + txB64, err := tx.Base64() + assert.NoError(t, err) + request := methods.SimulateTransactionRequest{Transaction: txB64} + var response methods.SimulateTransactionResponse + err = client.CallResult(context.Background(), "simulateTransaction", request, &response) + assert.NoError(t, err) + return response +} + +func preflightTransactionParamsLocally(t *testing.T, params txnbuild.TransactionParams, response methods.SimulateTransactionResponse) txnbuild.TransactionParams { + if !assert.Empty(t, response.Error) { + fmt.Println(response.Error) + } + var transactionData xdr.SorobanTransactionData + err := xdr.SafeUnmarshalBase64(response.TransactionData, &transactionData) + require.NoError(t, err) + + op := params.Operations[0] + switch v := op.(type) { + case *txnbuild.InvokeHostFunction: + require.Len(t, response.Results, 1) + 
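+ // Attach the simulated Soroban resource footprint to the operation and replay any authorization entries returned by the simulation.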
v.Ext = xdr.TransactionExt{ + V: 1, + SorobanData: &transactionData, + } + var auth []xdr.SorobanAuthorizationEntry + for _, b64 := range response.Results[0].Auth { + var a xdr.SorobanAuthorizationEntry + err := xdr.SafeUnmarshalBase64(b64, &a) + assert.NoError(t, err) + auth = append(auth, a) + } + v.Auth = auth + case *txnbuild.ExtendFootprintTtl: + require.Len(t, response.Results, 0) + v.Ext = xdr.TransactionExt{ + V: 1, + SorobanData: &transactionData, + } + case *txnbuild.RestoreFootprint: + require.Len(t, response.Results, 0) + v.Ext = xdr.TransactionExt{ + V: 1, + SorobanData: &transactionData, + } + default: + t.Fatalf("Wrong operation type %v", op) + } + + params.Operations = []txnbuild.Operation{op} + + params.BaseFee += response.MinResourceFee + return params +} + +func preflightTransactionParams(t *testing.T, client *jrpc2.Client, params txnbuild.TransactionParams) txnbuild.TransactionParams { + response := simulateTransactionFromTxParams(t, client, params) + // The preamble should be zero except for the special restore case + assert.Nil(t, response.RestorePreamble) + return preflightTransactionParamsLocally(t, params, response) +} + +func TestSimulateTransactionSucceeds(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase).Address() + contractBinary := getHelloWorldContract(t) + params := txnbuild.TransactionParams{ + SourceAccount: &txnbuild.SimpleAccount{ + AccountID: sourceAccount, + Sequence: 0, + }, + IncrementSequenceNum: false, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(sourceAccount, contractBinary), + }, + BaseFee: txnbuild.MinBaseFee, + Memo: nil, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + result := simulateTransactionFromTxParams(t, client, params) + + contractHash := sha256.Sum256(contractBinary) + contractHashBytes := xdr.ScBytes(contractHash[:]) + expectedXdr := xdr.ScVal{Type: xdr.ScValTypeScvBytes, Bytes: &contractHashBytes} + assert.Greater(t, result.LatestLedger, uint32(0)) + assert.Greater(t, result.Cost.CPUInstructions, uint64(0)) + assert.Greater(t, result.Cost.MemoryBytes, uint64(0)) + + expectedTransactionData := xdr.SorobanTransactionData{ + Resources: xdr.SorobanResources{ + Footprint: xdr.LedgerFootprint{ + ReadWrite: []xdr.LedgerKey{ + { + Type: xdr.LedgerEntryTypeContractCode, + ContractCode: &xdr.LedgerKeyContractCode{ + Hash: xdr.Hash(contractHash), + }, + }, + }, + }, + Instructions: 4378462, + ReadBytes: 0, + WriteBytes: 7048, + }, + // the resulting fee is derived from the compute factors and a default padding is applied to instructions by preflight + // for test purposes, the most deterministic way to assert the resulting fee is expected value in test scope, is to capture + // the resulting fee from current preflight output and re-plug it in here, rather than try to re-implement the cost-model algo + // in the test. + ResourceFee: 132146, + } + + // First, decode and compare the transaction data so we get a decent diff if it fails. 
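+ // The footprint must match exactly, but instruction counts, byte totals, and fees drift between preflight versions, so those are checked with InDelta below.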
+ var transactionData xdr.SorobanTransactionData + err := xdr.SafeUnmarshalBase64(result.TransactionData, &transactionData) + assert.NoError(t, err) + assert.Equal(t, expectedTransactionData.Resources.Footprint, transactionData.Resources.Footprint) + assert.InDelta(t, uint32(expectedTransactionData.Resources.Instructions), uint32(transactionData.Resources.Instructions), 3200000) + assert.InDelta(t, uint32(expectedTransactionData.Resources.ReadBytes), uint32(transactionData.Resources.ReadBytes), 10) + assert.InDelta(t, uint32(expectedTransactionData.Resources.WriteBytes), uint32(transactionData.Resources.WriteBytes), 300) + assert.InDelta(t, int64(expectedTransactionData.ResourceFee), int64(transactionData.ResourceFee), 4000) + + // Then decode and check the result xdr, separately so we get a decent diff if it fails. + assert.Len(t, result.Results, 1) + var resultXdr xdr.ScVal + err = xdr.SafeUnmarshalBase64(result.Results[0].XDR, &resultXdr) + assert.NoError(t, err) + assert.Equal(t, expectedXdr, resultXdr) + + // test operation which does not have a source account + withoutSourceAccountOp := createInstallContractCodeOperation("", contractBinary) + params = txnbuild.TransactionParams{ + SourceAccount: &txnbuild.SimpleAccount{ + AccountID: sourceAccount, + Sequence: 0, + }, + IncrementSequenceNum: false, + Operations: []txnbuild.Operation{withoutSourceAccountOp}, + BaseFee: txnbuild.MinBaseFee, + Memo: nil, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + require.NoError(t, err) + + resultForRequestWithoutOpSource := simulateTransactionFromTxParams(t, client, params) + // Let's not compare the latest ledger since it may change + result.LatestLedger = resultForRequestWithoutOpSource.LatestLedger + assert.Equal(t, result, resultForRequestWithoutOpSource) + + // test that operation source account takes precedence over tx source account + params = txnbuild.TransactionParams{ + SourceAccount: &txnbuild.SimpleAccount{ + AccountID: keypair.Root("test passphrase").Address(), + Sequence: 0, + }, + IncrementSequenceNum: false, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(sourceAccount, contractBinary), + }, + BaseFee: txnbuild.MinBaseFee, + Memo: nil, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + + resultForRequestWithDifferentTxSource := simulateTransactionFromTxParams(t, client, params) + assert.GreaterOrEqual(t, resultForRequestWithDifferentTxSource.LatestLedger, result.LatestLedger) + // apart from latest ledger the response should be the same + resultForRequestWithDifferentTxSource.LatestLedger = result.LatestLedger + assert.Equal(t, result, resultForRequestWithDifferentTxSource) +} + +func TestSimulateTransactionWithAuth(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase) + address := sourceAccount.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + helloWorldContract := getHelloWorldContract(t) + + params := preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(account.AccountID, helloWorldContract), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + + tx, err := txnbuild.NewTransaction(params) + 
assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + deployContractOp := createCreateContractOperation(address, helloWorldContract) + deployContractParams := txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + deployContractOp, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + response := simulateTransactionFromTxParams(t, client, deployContractParams) + require.NotEmpty(t, response.Results) + require.Len(t, response.Results[0].Auth, 1) + require.Empty(t, deployContractOp.Auth) + + var auth xdr.SorobanAuthorizationEntry + assert.NoError(t, xdr.SafeUnmarshalBase64(response.Results[0].Auth[0], &auth)) + require.Equal(t, auth.Credentials.Type, xdr.SorobanCredentialsTypeSorobanCredentialsSourceAccount) + deployContractOp.Auth = append(deployContractOp.Auth, auth) + deployContractParams.Operations = []txnbuild.Operation{deployContractOp} + + // preflight deployContractOp with auth + deployContractParams = preflightTransactionParams(t, client, deployContractParams) + tx, err = txnbuild.NewTransaction(deployContractParams) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) +} + +func TestSimulateInvokeContractTransactionSucceeds(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase) + address := sourceAccount.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + helloWorldContract := getHelloWorldContract(t) + + params := preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(account.AccountID, helloWorldContract), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + + tx, err := txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + params = preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createCreateContractOperation(address, helloWorldContract), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + + tx, err = txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + contractID := getContractID(t, address, testSalt, StandaloneNetworkPassphrase) + contractFnParameterSym := xdr.ScSymbol("world") + authAddrArg := "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H" + authAccountIDArg := xdr.MustAddress(authAddrArg) + tx, err = txnbuild.NewTransaction(txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + &txnbuild.CreateAccount{ + Destination: authAddrArg, + Amount: "100000", + SourceAccount: address, + }, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + params = txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: false, + Operations: 
[]txnbuild.Operation{ + createInvokeHostOperation( + address, + contractID, + "auth", + xdr.ScVal{ + Type: xdr.ScValTypeScvAddress, + Address: &xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeAccount, + AccountId: &authAccountIDArg, + }, + }, + xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &contractFnParameterSym, + }, + ), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + tx, err = txnbuild.NewTransaction(params) + + assert.NoError(t, err) + + txB64, err := tx.Base64() + assert.NoError(t, err) + + request := methods.SimulateTransactionRequest{Transaction: txB64} + var response methods.SimulateTransactionResponse + err = client.CallResult(context.Background(), "simulateTransaction", request, &response) + assert.NoError(t, err) + assert.Empty(t, response.Error) + + // check the result + assert.Len(t, response.Results, 1) + var obtainedResult xdr.ScVal + err = xdr.SafeUnmarshalBase64(response.Results[0].XDR, &obtainedResult) + assert.NoError(t, err) + assert.Equal(t, xdr.ScValTypeScvAddress, obtainedResult.Type) + require.NotNil(t, obtainedResult.Address) + assert.Equal(t, authAccountIDArg, obtainedResult.Address.MustAccountId()) + + // check the footprint + var obtainedTransactionData xdr.SorobanTransactionData + err = xdr.SafeUnmarshalBase64(response.TransactionData, &obtainedTransactionData) + obtainedFootprint := obtainedTransactionData.Resources.Footprint + assert.NoError(t, err) + assert.Len(t, obtainedFootprint.ReadWrite, 1) + assert.Len(t, obtainedFootprint.ReadOnly, 3) + ro0 := obtainedFootprint.ReadOnly[0] + assert.Equal(t, xdr.LedgerEntryTypeAccount, ro0.Type) + assert.Equal(t, authAddrArg, ro0.Account.AccountId.Address()) + ro1 := obtainedFootprint.ReadOnly[1] + assert.Equal(t, xdr.LedgerEntryTypeContractData, ro1.Type) + assert.Equal(t, xdr.ScAddressTypeScAddressTypeContract, ro1.ContractData.Contract.Type) + assert.Equal(t, xdr.Hash(contractID), *ro1.ContractData.Contract.ContractId) + assert.Equal(t, xdr.ScValTypeScvLedgerKeyContractInstance, ro1.ContractData.Key.Type) + ro2 := obtainedFootprint.ReadOnly[2] + assert.Equal(t, xdr.LedgerEntryTypeContractCode, ro2.Type) + contractHash := sha256.Sum256(helloWorldContract) + assert.Equal(t, xdr.Hash(contractHash), ro2.ContractCode.Hash) + assert.NoError(t, err) + + assert.NotZero(t, obtainedTransactionData.ResourceFee) + assert.NotZero(t, obtainedTransactionData.Resources.Instructions) + assert.NotZero(t, obtainedTransactionData.Resources.ReadBytes) + assert.NotZero(t, obtainedTransactionData.Resources.WriteBytes) + + // check the auth + assert.Len(t, response.Results[0].Auth, 1) + var obtainedAuth xdr.SorobanAuthorizationEntry + err = xdr.SafeUnmarshalBase64(response.Results[0].Auth[0], &obtainedAuth) + assert.NoError(t, err) + assert.Equal(t, obtainedAuth.Credentials.Type, xdr.SorobanCredentialsTypeSorobanCredentialsAddress) + assert.Equal(t, obtainedAuth.Credentials.Address.Signature.Type, xdr.ScValTypeScvVoid) + + assert.NotZero(t, obtainedAuth.Credentials.Address.Nonce) + assert.Equal(t, xdr.ScAddressTypeScAddressTypeAccount, obtainedAuth.Credentials.Address.Address.Type) + assert.Equal(t, authAddrArg, obtainedAuth.Credentials.Address.Address.AccountId.Address()) + + assert.Equal(t, xdr.SorobanCredentialsTypeSorobanCredentialsAddress, obtainedAuth.Credentials.Type) + assert.Equal(t, xdr.ScAddressTypeScAddressTypeAccount, obtainedAuth.Credentials.Address.Address.Type) + assert.Equal(t, authAddrArg, 
obtainedAuth.Credentials.Address.Address.AccountId.Address()) + assert.Equal(t, xdr.SorobanAuthorizedFunctionTypeSorobanAuthorizedFunctionTypeContractFn, obtainedAuth.RootInvocation.Function.Type) + assert.Equal(t, xdr.ScSymbol("auth"), obtainedAuth.RootInvocation.Function.ContractFn.FunctionName) + assert.Len(t, obtainedAuth.RootInvocation.Function.ContractFn.Args, 2) + world := obtainedAuth.RootInvocation.Function.ContractFn.Args[1] + assert.Equal(t, xdr.ScValTypeScvSymbol, world.Type) + assert.Equal(t, xdr.ScSymbol("world"), *world.Sym) + assert.Nil(t, obtainedAuth.RootInvocation.SubInvocations) + + // check the events. There will be 2 debug events and the event emitted by the "auth" function + // which is the one we are going to check. + assert.Len(t, response.Events, 3) + var event xdr.DiagnosticEvent + err = xdr.SafeUnmarshalBase64(response.Events[1], &event) + assert.NoError(t, err) + assert.True(t, event.InSuccessfulContractCall) + assert.NotNil(t, event.Event.ContractId) + assert.Equal(t, xdr.Hash(contractID), *event.Event.ContractId) + assert.Equal(t, xdr.ContractEventTypeContract, event.Event.Type) + assert.Equal(t, int32(0), event.Event.Body.V) + assert.Equal(t, xdr.ScValTypeScvSymbol, event.Event.Body.V0.Data.Type) + assert.Equal(t, xdr.ScSymbol("world"), *event.Event.Body.V0.Data.Sym) + assert.Len(t, event.Event.Body.V0.Topics, 1) + assert.Equal(t, xdr.ScValTypeScvString, event.Event.Body.V0.Topics[0].Type) + assert.Equal(t, xdr.ScString("auth"), *event.Event.Body.V0.Topics[0].Str) + metrics := getMetrics(test) + require.Contains(t, metrics, "soroban_rpc_json_rpc_request_duration_seconds_count{endpoint=\"simulateTransaction\",status=\"ok\"} 3") + require.Contains(t, metrics, "soroban_rpc_preflight_pool_request_ledger_get_duration_seconds_count{status=\"ok\",type=\"db\"} 3") + require.Contains(t, metrics, "soroban_rpc_preflight_pool_request_ledger_get_duration_seconds_count{status=\"ok\",type=\"all\"} 3") + require.Contains(t, metrics, "soroban_rpc_preflight_pool_request_ledger_entries_fetched_sum 67") +} + +func TestSimulateTransactionError(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase).Address() + invokeHostOp := createInvokeHostOperation(sourceAccount, xdr.Hash{}, "noMethod") + invokeHostOp.HostFunction = xdr.HostFunction{ + Type: xdr.HostFunctionTypeHostFunctionTypeInvokeContract, + InvokeContract: &xdr.InvokeContractArgs{ + ContractAddress: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &xdr.Hash{0x1, 0x2}, + }, + FunctionName: "", + Args: nil, + }, + } + params := txnbuild.TransactionParams{ + SourceAccount: &txnbuild.SimpleAccount{ + AccountID: keypair.Root(StandaloneNetworkPassphrase).Address(), + Sequence: 0, + }, + IncrementSequenceNum: false, + Operations: []txnbuild.Operation{invokeHostOp}, + BaseFee: txnbuild.MinBaseFee, + Memo: nil, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + result := simulateTransactionFromTxParams(t, client, params) + assert.Greater(t, result.LatestLedger, uint32(0)) + assert.Contains(t, result.Error, "MissingValue") + require.Len(t, result.Events, 1) + var event xdr.DiagnosticEvent + require.NoError(t, xdr.SafeUnmarshalBase64(result.Events[0], &event)) +} + +func TestSimulateTransactionMultipleOperations(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := 
jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase).Address() + contractBinary := getHelloWorldContract(t) + params := txnbuild.TransactionParams{ + SourceAccount: &txnbuild.SimpleAccount{ + AccountID: keypair.Root(StandaloneNetworkPassphrase).Address(), + Sequence: 0, + }, + IncrementSequenceNum: false, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(sourceAccount, contractBinary), + createCreateContractOperation(sourceAccount, contractBinary), + }, + BaseFee: txnbuild.MinBaseFee, + Memo: nil, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + + result := simulateTransactionFromTxParams(t, client, params) + assert.Equal( + t, + methods.SimulateTransactionResponse{ + Error: "Transaction contains more than one operation", + }, + result, + ) +} + +func TestSimulateTransactionWithoutInvokeHostFunction(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + params := txnbuild.TransactionParams{ + SourceAccount: &txnbuild.SimpleAccount{ + AccountID: keypair.Root(StandaloneNetworkPassphrase).Address(), + Sequence: 0, + }, + IncrementSequenceNum: false, + Operations: []txnbuild.Operation{ + &txnbuild.BumpSequence{BumpTo: 1}, + }, + BaseFee: txnbuild.MinBaseFee, + Memo: nil, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + result := simulateTransactionFromTxParams(t, client, params) + assert.Equal( + t, + methods.SimulateTransactionResponse{ + Error: "Transaction contains unsupported operation type: OperationTypeBumpSequence", + }, + result, + ) +} + +func TestSimulateTransactionUnmarshalError(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + request := methods.SimulateTransactionRequest{Transaction: "invalid"} + var result methods.SimulateTransactionResponse + err := client.CallResult(context.Background(), "simulateTransaction", request, &result) + assert.NoError(t, err) + assert.Equal( + t, + "Could not unmarshal transaction", + result.Error, + ) +} + +func TestSimulateTransactionExtendAndRestoreFootprint(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase) + address := sourceAccount.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + helloWorldContract := getHelloWorldContract(t) + + params := preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(account.AccountID, helloWorldContract), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + tx, err := txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + params = preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createCreateContractOperation(address, helloWorldContract), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + tx, err = txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, 
client, sourceAccount, tx) + + contractID := getContractID(t, address, testSalt, StandaloneNetworkPassphrase) + invokeIncPresistentEntryParams := txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInvokeHostOperation( + address, + contractID, + "inc", + ), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + params = preflightTransactionParams(t, client, invokeIncPresistentEntryParams) + tx, err = txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + // get the counter ledger entry TTL + key := getCounterLedgerKey(contractID) + + keyB64, err := xdr.MarshalBase64(key) + require.NoError(t, err) + getLedgerEntryrequest := methods.GetLedgerEntryRequest{ + Key: keyB64, + } + var getLedgerEntryResult methods.GetLedgerEntryResponse + err = client.CallResult(context.Background(), "getLedgerEntry", getLedgerEntryrequest, &getLedgerEntryResult) + assert.NoError(t, err) + + var entry xdr.LedgerEntryData + assert.NoError(t, xdr.SafeUnmarshalBase64(getLedgerEntryResult.XDR, &entry)) + assert.Equal(t, xdr.LedgerEntryTypeContractData, entry.Type) + require.NotNil(t, getLedgerEntryResult.LiveUntilLedgerSeq) + + initialLiveUntil := *getLedgerEntryResult.LiveUntilLedgerSeq + + // Extend the initial TTL + params = preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + &txnbuild.ExtendFootprintTtl{ + ExtendTo: 20, + Ext: xdr.TransactionExt{ + V: 1, + SorobanData: &xdr.SorobanTransactionData{ + Resources: xdr.SorobanResources{ + Footprint: xdr.LedgerFootprint{ + ReadOnly: []xdr.LedgerKey{key}, + }, + }, + }, + }, + }, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + tx, err = txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + err = client.CallResult(context.Background(), "getLedgerEntry", getLedgerEntryrequest, &getLedgerEntryResult) + assert.NoError(t, err) + assert.NoError(t, xdr.SafeUnmarshalBase64(getLedgerEntryResult.XDR, &entry)) + assert.Equal(t, xdr.LedgerEntryTypeContractData, entry.Type) + require.NotNil(t, getLedgerEntryResult.LiveUntilLedgerSeq) + newLiveUntilSeq := *getLedgerEntryResult.LiveUntilLedgerSeq + assert.Greater(t, newLiveUntilSeq, initialLiveUntil) + + // Wait until it is not live anymore + waitUntilLedgerEntryTTL(t, client, key) + + // and restore it + params = preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + &txnbuild.RestoreFootprint{ + Ext: xdr.TransactionExt{ + V: 1, + SorobanData: &xdr.SorobanTransactionData{ + Resources: xdr.SorobanResources{ + Footprint: xdr.LedgerFootprint{ + ReadWrite: []xdr.LedgerKey{key}, + }, + }, + }, + }, + }, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + tx, err = txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + // Wait for TTL again and check the pre-restore field when trying to exec the contract again + waitUntilLedgerEntryTTL(t, client, key) + + simulationResult := simulateTransactionFromTxParams(t, client, 
invokeIncPresistentEntryParams) + require.NotNil(t, simulationResult.RestorePreamble) + assert.NotZero(t, simulationResult.RestorePreamble) + + params = preflightTransactionParamsLocally(t, + txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + &txnbuild.RestoreFootprint{}, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }, + methods.SimulateTransactionResponse{ + TransactionData: simulationResult.RestorePreamble.TransactionData, + MinResourceFee: simulationResult.RestorePreamble.MinResourceFee, + }, + ) + tx, err = txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + // Finally, we should be able to send the inc host function invocation now that we + // have pre-restored the entries + params = preflightTransactionParamsLocally(t, invokeIncPresistentEntryParams, simulationResult) + tx, err = txnbuild.NewTransaction(params) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) +} + +func getCounterLedgerKey(contractID [32]byte) xdr.LedgerKey { + contractIDHash := xdr.Hash(contractID) + counterSym := xdr.ScSymbol("COUNTER") + key := xdr.LedgerKey{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.LedgerKeyContractData{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractIDHash, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &counterSym, + }, + Durability: xdr.ContractDataDurabilityPersistent, + }, + } + return key +} + +func waitUntilLedgerEntryTTL(t *testing.T, client *jrpc2.Client, ledgerKey xdr.LedgerKey) { + keyB64, err := xdr.MarshalBase64(ledgerKey) + require.NoError(t, err) + request := methods.GetLedgerEntriesRequest{ + Keys: []string{keyB64}, + } + ttled := false + for i := 0; i < 50; i++ { + var result methods.GetLedgerEntriesResponse + var entry xdr.LedgerEntryData + err := client.CallResult(context.Background(), "getLedgerEntries", request, &result) + require.NoError(t, err) + require.NotEmpty(t, result.Entries) + require.NoError(t, xdr.SafeUnmarshalBase64(result.Entries[0].XDR, &entry)) + require.NotEqual(t, xdr.LedgerEntryTypeTtl, entry.Type) + liveUntilLedgerSeq := xdr.Uint32(*result.Entries[0].LiveUntilLedgerSeq) + // See https://soroban.stellar.org/docs/fundamentals-and-concepts/state-expiration#expiration-ledger + currentLedger := result.LatestLedger + 1 + if xdr.Uint32(currentLedger) > liveUntilLedgerSeq { + ttled = true + t.Logf("ledger entry ttl'ed") + break + } + t.Log("waiting for ledger entry to ttl at ledger", liveUntilLedgerSeq) + time.Sleep(time.Second) + } + require.True(t, ttled) +} + +func TestSimulateInvokePrng_u64_in_range(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase) + address := sourceAccount.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + helloWorldContract := getHelloWorldContract(t) + + params := preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(account.AccountID, helloWorldContract), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + + tx, err := 
txnbuild.NewTransaction(params) + require.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + params = preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createCreateContractOperation(address, helloWorldContract), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + + tx, err = txnbuild.NewTransaction(params) + require.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + contractID := getContractID(t, address, testSalt, StandaloneNetworkPassphrase) + authAddrArg := "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H" + tx, err = txnbuild.NewTransaction(txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + &txnbuild.CreateAccount{ + Destination: authAddrArg, + Amount: "100000", + SourceAccount: address, + }, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + require.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + low := xdr.Uint64(1500) + high := xdr.Uint64(10000) + params = txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: false, + Operations: []txnbuild.Operation{ + createInvokeHostOperation( + address, + contractID, + "prng_u64_in_range", + xdr.ScVal{ + Type: xdr.ScValTypeScvU64, + U64: &low, + }, + xdr.ScVal{ + Type: xdr.ScValTypeScvU64, + U64: &high, + }, + ), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + tx, err = txnbuild.NewTransaction(params) + + require.NoError(t, err) + + txB64, err := tx.Base64() + require.NoError(t, err) + + request := methods.SimulateTransactionRequest{Transaction: txB64} + var response methods.SimulateTransactionResponse + err = client.CallResult(context.Background(), "simulateTransaction", request, &response) + require.NoError(t, err) + require.Empty(t, response.Error) + + // check the result + require.Len(t, response.Results, 1) + var obtainedResult xdr.ScVal + err = xdr.SafeUnmarshalBase64(response.Results[0].XDR, &obtainedResult) + require.NoError(t, err) + require.Equal(t, xdr.ScValTypeScvU64, obtainedResult.Type) + require.LessOrEqual(t, uint64(*obtainedResult.U64), uint64(high)) + require.GreaterOrEqual(t, uint64(*obtainedResult.U64), uint64(low)) +} + +func TestSimulateSystemEvent(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + sourceAccount := keypair.Root(StandaloneNetworkPassphrase) + address := sourceAccount.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + helloWorldContract := getHelloWorldContract(t) + + params := preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(account.AccountID, helloWorldContract), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + + tx, err := txnbuild.NewTransaction(params) + require.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + params = preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + 
IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createCreateContractOperation(address, helloWorldContract), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + + tx, err = txnbuild.NewTransaction(params) + require.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + contractID := getContractID(t, address, testSalt, StandaloneNetworkPassphrase) + authAddrArg := "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H" + tx, err = txnbuild.NewTransaction(txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + &txnbuild.CreateAccount{ + Destination: authAddrArg, + Amount: "100000", + SourceAccount: address, + }, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + require.NoError(t, err) + sendSuccessfulTransaction(t, client, sourceAccount, tx) + + contractHash := sha256.Sum256(helloWorldContract) + byteSlice := xdr.ScBytes(contractHash[:]) + + params = txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: false, + Operations: []txnbuild.Operation{ + createInvokeHostOperation( + address, + contractID, + "upgrade_contract", + xdr.ScVal{ + Type: xdr.ScValTypeScvBytes, + Bytes: &byteSlice, + }, + ), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + } + tx, err = txnbuild.NewTransaction(params) + + require.NoError(t, err) + + txB64, err := tx.Base64() + require.NoError(t, err) + + request := methods.SimulateTransactionRequest{Transaction: txB64} + var response methods.SimulateTransactionResponse + err = client.CallResult(context.Background(), "simulateTransaction", request, &response) + require.NoError(t, err) + require.Empty(t, response.Error) + + // check the result + require.Len(t, response.Results, 1) + var obtainedResult xdr.ScVal + err = xdr.SafeUnmarshalBase64(response.Results[0].XDR, &obtainedResult) + require.NoError(t, err) + + var transactionData xdr.SorobanTransactionData + err = xdr.SafeUnmarshalBase64(response.TransactionData, &transactionData) + require.NoError(t, err) + assert.InDelta(t, 6856, uint32(transactionData.Resources.ReadBytes), 200) + + // the resulting fee is derived from compute factors and a default padding is applied to instructions by preflight + // for test purposes, the most deterministic way to assert the resulting fee is expected value in test scope, is to capture + // the resulting fee from current preflight output and re-plug it in here, rather than try to re-implement the cost-model algo + // in the test. 
+ assert.InDelta(t, 100980, int64(transactionData.ResourceFee), 5000) + assert.InDelta(t, 104, uint32(transactionData.Resources.WriteBytes), 15) + require.GreaterOrEqual(t, len(response.Events), 3) +} diff --git a/cmd/soroban-rpc/internal/test/stellar-core-integration-tests.cfg b/cmd/soroban-rpc/internal/test/stellar-core-integration-tests.cfg new file mode 100644 index 00000000..c194dbae --- /dev/null +++ b/cmd/soroban-rpc/internal/test/stellar-core-integration-tests.cfg @@ -0,0 +1,30 @@ +ARTIFICIALLY_ACCELERATE_TIME_FOR_TESTING=true +ENABLE_DIAGNOSTICS_FOR_TX_SUBMISSION=true + +NETWORK_PASSPHRASE="Standalone Network ; February 2017" + +PEER_PORT=11625 +HTTP_PORT=11626 +PUBLIC_HTTP_PORT=true + +NODE_SEED="SACJC372QBSSKJYTV5A7LWT4NXWHTQO6GHG4QDAVC2XDPX6CNNXFZ4JK" + +NODE_IS_VALIDATOR=true +UNSAFE_QUORUM=true +FAILURE_SAFETY=0 + +DATABASE="postgresql://user=postgres password=mysecretpassword host=core-postgres port=5641 dbname=stellar" + +# Lower the TTL of persistent ledger entries +# so that ledger entry extension/restoring becomes testeable +TESTING_MINIMUM_PERSISTENT_ENTRY_LIFETIME=10 +TESTING_SOROBAN_HIGH_LIMIT_OVERRIDE=true + +[QUORUM_SET] +THRESHOLD_PERCENT=100 +VALIDATORS=["GD5KD2KEZJIGTC63IGW6UMUSMVUVG5IHG64HUTFWCHVZH2N2IBOQN7PS"] + +[HISTORY.vs] +get="cp history/vs/{0} {1}" +put="cp {0} history/vs/{1}" +mkdir="mkdir -p history/vs/{0}" diff --git a/cmd/soroban-rpc/internal/test/transaction_test.go b/cmd/soroban-rpc/internal/test/transaction_test.go new file mode 100644 index 00000000..1cd0d198 --- /dev/null +++ b/cmd/soroban-rpc/internal/test/transaction_test.go @@ -0,0 +1,355 @@ +package test + +import ( + "context" + "crypto/sha256" + "fmt" + "testing" + "time" + + "github.com/creachadair/jrpc2" + "github.com/creachadair/jrpc2/jhttp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/stellar/go/keypair" + proto "github.com/stellar/go/protocols/stellarcore" + "github.com/stellar/go/txnbuild" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/methods" +) + +func TestSendTransactionSucceedsWithoutResults(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + kp := keypair.Root(StandaloneNetworkPassphrase) + address := kp.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + tx, err := txnbuild.NewTransaction(txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + &txnbuild.SetOptions{HomeDomain: txnbuild.NewHomeDomain("soroban.com")}, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + assert.NoError(t, err) + sendSuccessfulTransaction(t, client, kp, tx) +} + +func TestSendTransactionSucceedsWithResults(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + kp := keypair.Root(StandaloneNetworkPassphrase) + address := kp.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + contractBinary := getHelloWorldContract(t) + params := preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(account.AccountID, contractBinary), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + 
}, + }) + tx, err := txnbuild.NewTransaction(params) + assert.NoError(t, err) + response := sendSuccessfulTransaction(t, client, kp, tx) + + // Check the result is what we expect + var transactionResult xdr.TransactionResult + assert.NoError(t, xdr.SafeUnmarshalBase64(response.ResultXdr, &transactionResult)) + opResults, ok := transactionResult.OperationResults() + assert.True(t, ok) + invokeHostFunctionResult, ok := opResults[0].MustTr().GetInvokeHostFunctionResult() + assert.True(t, ok) + assert.Equal(t, invokeHostFunctionResult.Code, xdr.InvokeHostFunctionResultCodeInvokeHostFunctionSuccess) + contractHash := sha256.Sum256(contractBinary) + contractHashBytes := xdr.ScBytes(contractHash[:]) + expectedScVal := xdr.ScVal{Type: xdr.ScValTypeScvBytes, Bytes: &contractHashBytes} + var transactionMeta xdr.TransactionMeta + assert.NoError(t, xdr.SafeUnmarshalBase64(response.ResultMetaXdr, &transactionMeta)) + assert.True(t, expectedScVal.Equals(transactionMeta.V3.SorobanMeta.ReturnValue)) + var resultXdr xdr.TransactionResult + assert.NoError(t, xdr.SafeUnmarshalBase64(response.ResultXdr, &resultXdr)) + expectedResult := xdr.TransactionResult{ + FeeCharged: resultXdr.FeeCharged, + Result: xdr.TransactionResultResult{ + Code: xdr.TransactionResultCodeTxSuccess, + Results: &[]xdr.OperationResult{ + { + Code: xdr.OperationResultCodeOpInner, + Tr: &xdr.OperationResultTr{ + Type: xdr.OperationTypeInvokeHostFunction, + InvokeHostFunctionResult: &xdr.InvokeHostFunctionResult{ + Code: xdr.InvokeHostFunctionResultCodeInvokeHostFunctionSuccess, + Success: (*resultXdr.Result.Results)[0].Tr.InvokeHostFunctionResult.Success, + }, + }, + }, + }, + }, + } + + assert.Equal(t, expectedResult, resultXdr) +} + +func TestSendTransactionBadSequence(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + kp := keypair.Root(StandaloneNetworkPassphrase) + address := kp.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + tx, err := txnbuild.NewTransaction(txnbuild.TransactionParams{ + SourceAccount: &account, + Operations: []txnbuild.Operation{ + &txnbuild.SetOptions{HomeDomain: txnbuild.NewHomeDomain("soroban.com")}, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + assert.NoError(t, err) + tx, err = tx.Sign(StandaloneNetworkPassphrase, kp) + assert.NoError(t, err) + b64, err := tx.Base64() + assert.NoError(t, err) + + request := methods.SendTransactionRequest{Transaction: b64} + var result methods.SendTransactionResponse + err = client.CallResult(context.Background(), "sendTransaction", request, &result) + assert.NoError(t, err) + + assert.NotZero(t, result.LatestLedger) + assert.NotZero(t, result.LatestLedgerCloseTime) + expectedHashHex, err := tx.HashHex(StandaloneNetworkPassphrase) + assert.NoError(t, err) + assert.Equal(t, expectedHashHex, result.Hash) + assert.Equal(t, proto.TXStatusError, result.Status) + var errorResult xdr.TransactionResult + assert.NoError(t, xdr.SafeUnmarshalBase64(result.ErrorResultXDR, &errorResult)) + assert.Equal(t, xdr.TransactionResultCodeTxBadSeq, errorResult.Result.Code) +} + +func TestSendTransactionFailedInsufficientResourceFee(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + kp := keypair.Root(StandaloneNetworkPassphrase) + address := kp.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + contractBinary := 
getHelloWorldContract(t) + params := preflightTransactionParams(t, client, txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + createInstallContractCodeOperation(account.AccountID, contractBinary), + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + + // make the transaction fail due to insufficient resource fees + params.Operations[0].(*txnbuild.InvokeHostFunction).Ext.SorobanData.ResourceFee /= 2 + + tx, err := txnbuild.NewTransaction(params) + assert.NoError(t, err) + + assert.NoError(t, err) + tx, err = tx.Sign(StandaloneNetworkPassphrase, kp) + assert.NoError(t, err) + b64, err := tx.Base64() + assert.NoError(t, err) + + request := methods.SendTransactionRequest{Transaction: b64} + var result methods.SendTransactionResponse + err = client.CallResult(context.Background(), "sendTransaction", request, &result) + assert.NoError(t, err) + + assert.Equal(t, proto.TXStatusError, result.Status) + var errorResult xdr.TransactionResult + assert.NoError(t, xdr.SafeUnmarshalBase64(result.ErrorResultXDR, &errorResult)) + assert.Equal(t, xdr.TransactionResultCodeTxSorobanInvalid, errorResult.Result.Code) + + assert.Greater(t, len(result.DiagnosticEventsXDR), 0) + var event xdr.DiagnosticEvent + err = xdr.SafeUnmarshalBase64(result.DiagnosticEventsXDR[0], &event) + assert.NoError(t, err) + +} + +func TestSendTransactionFailedInLedger(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + kp := keypair.Root(StandaloneNetworkPassphrase) + address := kp.Address() + account := txnbuild.NewSimpleAccount(address, 0) + + tx, err := txnbuild.NewTransaction(txnbuild.TransactionParams{ + SourceAccount: &account, + IncrementSequenceNum: true, + Operations: []txnbuild.Operation{ + &txnbuild.Payment{ + // Destination doesn't exist, making the transaction fail + Destination: "GA7QYNF7SOWQ3GLR2BGMZEHXAVIRZA4KVWLTJJFC7MGXUA74P7UJVSGZ", + Amount: "100000.0000000", + Asset: txnbuild.NativeAsset{}, + SourceAccount: "", + }, + }, + BaseFee: txnbuild.MinBaseFee, + Preconditions: txnbuild.Preconditions{ + TimeBounds: txnbuild.NewInfiniteTimeout(), + }, + }) + assert.NoError(t, err) + tx, err = tx.Sign(StandaloneNetworkPassphrase, kp) + assert.NoError(t, err) + b64, err := tx.Base64() + assert.NoError(t, err) + + request := methods.SendTransactionRequest{Transaction: b64} + var result methods.SendTransactionResponse + err = client.CallResult(context.Background(), "sendTransaction", request, &result) + assert.NoError(t, err) + + expectedHashHex, err := tx.HashHex(StandaloneNetworkPassphrase) + assert.NoError(t, err) + + assert.Equal(t, expectedHashHex, result.Hash) + if !assert.Equal(t, proto.TXStatusPending, result.Status) { + var txResult xdr.TransactionResult + err := xdr.SafeUnmarshalBase64(result.ErrorResultXDR, &txResult) + assert.NoError(t, err) + fmt.Printf("error: %#v\n", txResult) + } + assert.NotZero(t, result.LatestLedger) + assert.NotZero(t, result.LatestLedgerCloseTime) + + response := getTransaction(t, client, expectedHashHex) + assert.Equal(t, methods.TransactionStatusFailed, response.Status) + var transactionResult xdr.TransactionResult + assert.NoError(t, xdr.SafeUnmarshalBase64(response.ResultXdr, &transactionResult)) + assert.Equal(t, xdr.TransactionResultCodeTxFailed, transactionResult.Result.Code) + assert.Greater(t, response.Ledger, result.LatestLedger) + assert.Greater(t, 
response.LedgerCloseTime, result.LatestLedgerCloseTime) + assert.GreaterOrEqual(t, response.LatestLedger, response.Ledger) + assert.GreaterOrEqual(t, response.LatestLedgerCloseTime, response.LedgerCloseTime) +} + +func TestSendTransactionFailedInvalidXDR(t *testing.T) { + test := NewTest(t) + + ch := jhttp.NewChannel(test.sorobanRPCURL(), nil) + client := jrpc2.NewClient(ch, nil) + + request := methods.SendTransactionRequest{Transaction: "abcdef"} + var response methods.SendTransactionResponse + jsonRPCErr := client.CallResult(context.Background(), "sendTransaction", request, &response).(*jrpc2.Error) + assert.Equal(t, "invalid_xdr", jsonRPCErr.Message) + assert.Equal(t, jrpc2.InvalidParams, jsonRPCErr.Code) +} + +func sendSuccessfulTransaction(t *testing.T, client *jrpc2.Client, kp *keypair.Full, transaction *txnbuild.Transaction) methods.GetTransactionResponse { + tx, err := transaction.Sign(StandaloneNetworkPassphrase, kp) + assert.NoError(t, err) + b64, err := tx.Base64() + assert.NoError(t, err) + + request := methods.SendTransactionRequest{Transaction: b64} + var result methods.SendTransactionResponse + err = client.CallResult(context.Background(), "sendTransaction", request, &result) + assert.NoError(t, err) + + expectedHashHex, err := tx.HashHex(StandaloneNetworkPassphrase) + assert.NoError(t, err) + + assert.Equal(t, expectedHashHex, result.Hash) + if !assert.Equal(t, proto.TXStatusPending, result.Status) { + var txResult xdr.TransactionResult + err := xdr.SafeUnmarshalBase64(result.ErrorResultXDR, &txResult) + assert.NoError(t, err) + fmt.Printf("error: %#v\n", txResult) + } + assert.NotZero(t, result.LatestLedger) + assert.NotZero(t, result.LatestLedgerCloseTime) + + response := getTransaction(t, client, expectedHashHex) + if !assert.Equal(t, methods.TransactionStatusSuccess, response.Status) { + var txResult xdr.TransactionResult + err := xdr.SafeUnmarshalBase64(response.ResultXdr, &txResult) + assert.NoError(t, err) + fmt.Printf("error: %#v\n", txResult) + var txMeta xdr.TransactionMeta + err = xdr.SafeUnmarshalBase64(response.ResultMetaXdr, &txMeta) + assert.NoError(t, err) + if txMeta.V == 3 && txMeta.V3.SorobanMeta != nil { + if len(txMeta.V3.SorobanMeta.Events) > 0 { + fmt.Println("Contract events:") + for i, e := range txMeta.V3.SorobanMeta.Events { + fmt.Printf(" %d: %s\n", i, e) + } + } + + if len(txMeta.V3.SorobanMeta.DiagnosticEvents) > 0 { + fmt.Println("Diagnostic events:") + for i, d := range txMeta.V3.SorobanMeta.DiagnosticEvents { + fmt.Printf(" %d: %s\n", i, d) + } + } + } + } + + require.NotNil(t, response.ResultXdr) + assert.Greater(t, response.Ledger, result.LatestLedger) + assert.Greater(t, response.LedgerCloseTime, result.LatestLedgerCloseTime) + assert.GreaterOrEqual(t, response.LatestLedger, response.Ledger) + assert.GreaterOrEqual(t, response.LatestLedgerCloseTime, response.LedgerCloseTime) + return response +} + +func getTransaction(t *testing.T, client *jrpc2.Client, hash string) methods.GetTransactionResponse { + var result methods.GetTransactionResponse + for i := 0; i < 60; i++ { + request := methods.GetTransactionRequest{Hash: hash} + err := client.CallResult(context.Background(), "getTransaction", request, &result) + assert.NoError(t, err) + + if result.Status == methods.TransactionStatusNotFound { + time.Sleep(time.Second) + continue + } + + return result + } + t.Fatal("getTransaction timed out") + return result +} diff --git a/cmd/soroban-rpc/internal/transactions/transactions.go b/cmd/soroban-rpc/internal/transactions/transactions.go new file 
mode 100644 index 00000000..8d58a035 --- /dev/null +++ b/cmd/soroban-rpc/internal/transactions/transactions.go @@ -0,0 +1,213 @@ +package transactions + +import ( + "sync" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stellar/go/ingest" + "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/ledgerbucketwindow" +) + +type transaction struct { + bucket *ledgerbucketwindow.LedgerBucket[[]xdr.Hash] + result []byte // encoded XDR of xdr.TransactionResult + meta []byte // encoded XDR of xdr.TransactionMeta + envelope []byte // encoded XDR of xdr.TransactionEnvelope + feeBump bool + successful bool + applicationOrder int32 +} + +// MemoryStore is an in-memory store of Stellar transactions. +type MemoryStore struct { + // networkPassphrase is an immutable string containing the + // Stellar network passphrase. + // Accessing networkPassphrase does not need to be protected + // by the lock + networkPassphrase string + lock sync.RWMutex + transactions map[xdr.Hash]transaction + transactionsByLedger *ledgerbucketwindow.LedgerBucketWindow[[]xdr.Hash] + transactionDurationMetric *prometheus.SummaryVec + transactionCountMetric prometheus.Summary +} + +// NewMemoryStore creates a new MemoryStore. +// The retention window is in units of ledgers. +// All events occurring in the following ledger range +// [ latestLedger - retentionWindow, latestLedger ] +// will be included in the MemoryStore. If the MemoryStore +// is full, any transactions from new ledgers will evict +// older entries outside the retention window. +func NewMemoryStore(daemon interfaces.Daemon, networkPassphrase string, retentionWindow uint32) *MemoryStore { + window := ledgerbucketwindow.NewLedgerBucketWindow[[]xdr.Hash](retentionWindow) + + // transactionDurationMetric is a metric for measuring latency of transaction store operations + transactionDurationMetric := prometheus.NewSummaryVec(prometheus.SummaryOpts{ + Namespace: daemon.MetricsNamespace(), Subsystem: "transactions", Name: "operation_duration_seconds", + Help: "transaction store operation durations, sliding window = 10m", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }, + []string{"operation"}, + ) + transactionCountMetric := prometheus.NewSummary(prometheus.SummaryOpts{ + Namespace: daemon.MetricsNamespace(), Subsystem: "transactions", Name: "count", + Help: "count of transactions ingested, sliding window = 10m", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }) + daemon.MetricsRegistry().MustRegister(transactionDurationMetric, transactionCountMetric) + + return &MemoryStore{ + networkPassphrase: networkPassphrase, + transactions: make(map[xdr.Hash]transaction), + transactionsByLedger: window, + transactionDurationMetric: transactionDurationMetric, + transactionCountMetric: transactionCountMetric, + } +} + +// IngestTransactions adds new transactions from the given ledger into the store. +// As a side effect, transactions which fall outside the retention window are +// removed from the store. 
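Before the implementation below, a hypothetical wiring sketch (not part of this change) may make the retention-window behaviour described above concrete; the channel of ledger close metas, the standalone network passphrase, and the window size are illustrative assumptions only:

```go
// Hypothetical wiring of the store into an ingestion loop.
package example

import (
	"github.com/stellar/go/xdr"

	"github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces"
	"github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/transactions"
)

// runIngestion feeds every closed ledger into the store. Eviction of ledgers
// that fall outside the retention window happens inside IngestTransactions,
// so the caller only has to keep feeding ledgers; no separate GC step is needed.
// The channel of ledger close metas is a stand-in for the daemon's real feed.
func runIngestion(daemon interfaces.Daemon, closed <-chan xdr.LedgerCloseMeta) error {
	// Retention window in ledgers; roughly 24h at a 5s close time (illustrative figure).
	store := transactions.NewMemoryStore(daemon, "Standalone Network ; February 2017", 17280)
	for lcm := range closed {
		if err := store.IngestTransactions(lcm); err != nil {
			return err
		}
	}
	return nil
}
```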
+func (m *MemoryStore) IngestTransactions(ledgerCloseMeta xdr.LedgerCloseMeta) error { + startTime := time.Now() + reader, err := ingest.NewLedgerTransactionReaderFromLedgerCloseMeta(m.networkPassphrase, ledgerCloseMeta) + if err != nil { + return err + } + + txCount := ledgerCloseMeta.CountTransactions() + transactions := make([]transaction, txCount) + hashes := make([]xdr.Hash, 0, txCount) + hashMap := map[xdr.Hash]transaction{} + var bucket ledgerbucketwindow.LedgerBucket[[]xdr.Hash] + + for i := 0; i < txCount; i++ { + tx, err := reader.Read() + if err != nil { + return err + } + transactions[i] = transaction{ + bucket: &bucket, + feeBump: tx.Envelope.IsFeeBump(), + applicationOrder: int32(tx.Index), + successful: tx.Result.Result.Successful(), + } + if transactions[i].result, err = tx.Result.Result.MarshalBinary(); err != nil { + return err + } + if transactions[i].meta, err = tx.UnsafeMeta.MarshalBinary(); err != nil { + return err + } + if transactions[i].envelope, err = tx.Envelope.MarshalBinary(); err != nil { + return err + } + if transactions[i].feeBump { + innerHash := tx.Result.InnerHash() + hashMap[innerHash] = transactions[i] + hashes = append(hashes, innerHash) + } + hashMap[tx.Result.TransactionHash] = transactions[i] + hashes = append(hashes, tx.Result.TransactionHash) + } + bucket = ledgerbucketwindow.LedgerBucket[[]xdr.Hash]{ + LedgerSeq: ledgerCloseMeta.LedgerSequence(), + LedgerCloseTimestamp: int64(ledgerCloseMeta.LedgerHeaderHistoryEntry().Header.ScpValue.CloseTime), + BucketContent: hashes, + } + + m.lock.Lock() + defer m.lock.Unlock() + evicted := m.transactionsByLedger.Append(bucket) + if evicted != nil { + // garbage-collect evicted entries + for _, evictedTxHash := range evicted.BucketContent { + delete(m.transactions, evictedTxHash) + } + } + for hash, tx := range hashMap { + m.transactions[hash] = tx + } + m.transactionDurationMetric.With(prometheus.Labels{"operation": "ingest"}).Observe(time.Since(startTime).Seconds()) + m.transactionCountMetric.Observe(float64(txCount)) + return nil +} + +type LedgerInfo struct { + Sequence uint32 + CloseTime int64 +} + +type Transaction struct { + Result []byte // XDR encoded xdr.TransactionResult + Meta []byte // XDR encoded xdr.TransactionMeta + Envelope []byte // XDR encoded xdr.TransactionEnvelope + FeeBump bool + ApplicationOrder int32 + Successful bool + Ledger LedgerInfo +} + +type StoreRange struct { + FirstLedger LedgerInfo + LastLedger LedgerInfo +} + +// GetLatestLedger returns the latest ledger available in the store. 
+func (m *MemoryStore) GetLatestLedger() LedgerInfo { + m.lock.RLock() + defer m.lock.RUnlock() + if m.transactionsByLedger.Len() > 0 { + lastBucket := m.transactionsByLedger.Get(m.transactionsByLedger.Len() - 1) + return LedgerInfo{ + Sequence: lastBucket.LedgerSeq, + CloseTime: lastBucket.LedgerCloseTimestamp, + } + } + return LedgerInfo{} +} + +// GetTransaction obtains a transaction from the store and whether it's present and the current store range +func (m *MemoryStore) GetTransaction(hash xdr.Hash) (Transaction, bool, StoreRange) { + startTime := time.Now() + m.lock.RLock() + defer m.lock.RUnlock() + var storeRange StoreRange + if m.transactionsByLedger.Len() > 0 { + firstBucket := m.transactionsByLedger.Get(0) + lastBucket := m.transactionsByLedger.Get(m.transactionsByLedger.Len() - 1) + storeRange = StoreRange{ + FirstLedger: LedgerInfo{ + Sequence: firstBucket.LedgerSeq, + CloseTime: firstBucket.LedgerCloseTimestamp, + }, + LastLedger: LedgerInfo{ + Sequence: lastBucket.LedgerSeq, + CloseTime: lastBucket.LedgerCloseTimestamp, + }, + } + } + internalTx, ok := m.transactions[hash] + if !ok { + return Transaction{}, false, storeRange + } + tx := Transaction{ + Result: internalTx.result, + Meta: internalTx.meta, + Envelope: internalTx.envelope, + FeeBump: internalTx.feeBump, + Successful: internalTx.successful, + ApplicationOrder: internalTx.applicationOrder, + Ledger: LedgerInfo{ + Sequence: internalTx.bucket.LedgerSeq, + CloseTime: internalTx.bucket.LedgerCloseTimestamp, + }, + } + + m.transactionDurationMetric.With(prometheus.Labels{"operation": "get"}).Observe(time.Since(startTime).Seconds()) + return tx, true, storeRange +} diff --git a/cmd/soroban-rpc/internal/transactions/transactions_test.go b/cmd/soroban-rpc/internal/transactions/transactions_test.go new file mode 100644 index 00000000..d32a62c6 --- /dev/null +++ b/cmd/soroban-rpc/internal/transactions/transactions_test.go @@ -0,0 +1,377 @@ +package transactions + +import ( + "encoding/hex" + "fmt" + "math" + "runtime" + "testing" + "time" + + "github.com/stellar/go/network" + "github.com/stellar/go/xdr" + "github.com/stretchr/testify/require" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon/interfaces" +) + +func expectedTransaction(t *testing.T, ledger uint32, feeBump bool) Transaction { + tx := Transaction{ + FeeBump: feeBump, + ApplicationOrder: 1, + Ledger: expectedLedgerInfo(ledger), + } + var err error + tx.Result, err = transactionResult(ledger, feeBump).MarshalBinary() + require.NoError(t, err) + tx.Meta, err = xdr.TransactionMeta{ + V: 3, + Operations: &[]xdr.OperationMeta{}, + V3: &xdr.TransactionMetaV3{}, + }.MarshalBinary() + require.NoError(t, err) + tx.Envelope, err = txEnvelope(ledger, feeBump).MarshalBinary() + require.NoError(t, err) + return tx +} + +func expectedLedgerInfo(ledgerSequence uint32) LedgerInfo { + return LedgerInfo{ + Sequence: ledgerSequence, + CloseTime: ledgerCloseTime(ledgerSequence), + } + +} + +func expectedStoreRange(startLedger uint32, endLedger uint32) StoreRange { + return StoreRange{ + FirstLedger: expectedLedgerInfo(startLedger), + LastLedger: expectedLedgerInfo(endLedger), + } +} + +func txHash(ledgerSequence uint32, feebump bool) xdr.Hash { + envelope := txEnvelope(ledgerSequence, feebump) + hash, err := network.HashTransactionInEnvelope(envelope, "passphrase") + if err != nil { + panic(err) + } + + return hash +} + +func ledgerCloseTime(ledgerSequence uint32) int64 { + return int64(ledgerSequence)*25 + 100 +} + +func transactionResult(ledgerSequence uint32, 
feeBump bool) xdr.TransactionResult { + if feeBump { + return xdr.TransactionResult{ + FeeCharged: 100, + Result: xdr.TransactionResultResult{ + Code: xdr.TransactionResultCodeTxFeeBumpInnerFailed, + InnerResultPair: &xdr.InnerTransactionResultPair{ + TransactionHash: txHash(ledgerSequence, false), + Result: xdr.InnerTransactionResult{ + Result: xdr.InnerTransactionResultResult{ + Code: xdr.TransactionResultCodeTxBadSeq, + }, + }, + }, + }, + } + } + return xdr.TransactionResult{ + FeeCharged: 100, + Result: xdr.TransactionResultResult{ + Code: xdr.TransactionResultCodeTxBadSeq, + }, + } +} + +func txMeta(ledgerSequence uint32, feeBump bool) xdr.LedgerCloseMeta { + envelope := txEnvelope(ledgerSequence, feeBump) + persistentKey := xdr.ScSymbol("TEMPVAL") + contractIDBytes, _ := hex.DecodeString("df06d62447fd25da07c0135eed7557e5a5497ee7d15b7fe345bd47e191d8f577") + var contractID xdr.Hash + copy(contractID[:], contractIDBytes) + contractAddress := xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractID, + } + xdrTrue := true + operationChanges := xdr.LedgerEntryChanges{ + { + Type: xdr.LedgerEntryChangeTypeLedgerEntryState, + State: &xdr.LedgerEntry{ + LastModifiedLedgerSeq: xdr.Uint32(ledgerSequence - 1), + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.ContractDataEntry{ + Contract: contractAddress, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &persistentKey, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvBool, + B: &xdrTrue, + }, + }, + }, + }, + }, + { + Type: xdr.LedgerEntryChangeTypeLedgerEntryUpdated, + Updated: &xdr.LedgerEntry{ + LastModifiedLedgerSeq: xdr.Uint32(ledgerSequence - 1), + Data: xdr.LedgerEntryData{ + Type: xdr.LedgerEntryTypeContractData, + ContractData: &xdr.ContractDataEntry{ + Contract: xdr.ScAddress{ + Type: xdr.ScAddressTypeScAddressTypeContract, + ContractId: &contractID, + }, + Key: xdr.ScVal{ + Type: xdr.ScValTypeScvSymbol, + Sym: &persistentKey, + }, + Durability: xdr.ContractDataDurabilityPersistent, + Val: xdr.ScVal{ + Type: xdr.ScValTypeScvBool, + B: &xdrTrue, + }, + }, + }, + }, + }, + } + txProcessing := []xdr.TransactionResultMeta{ + { + TxApplyProcessing: xdr.TransactionMeta{ + V: 3, + Operations: &[]xdr.OperationMeta{ + { + Changes: operationChanges, + }, + }, + V3: &xdr.TransactionMetaV3{}, + }, + Result: xdr.TransactionResultPair{ + TransactionHash: txHash(ledgerSequence, feeBump), + Result: transactionResult(ledgerSequence, feeBump), + }, + }, + } + + components := []xdr.TxSetComponent{ + { + Type: xdr.TxSetComponentTypeTxsetCompTxsMaybeDiscountedFee, + TxsMaybeDiscountedFee: &xdr.TxSetComponentTxsMaybeDiscountedFee{ + BaseFee: nil, + Txs: []xdr.TransactionEnvelope{ + envelope, + }, + }, + }, + } + return xdr.LedgerCloseMeta{ + V: 1, + V1: &xdr.LedgerCloseMetaV1{ + LedgerHeader: xdr.LedgerHeaderHistoryEntry{ + Header: xdr.LedgerHeader{ + ScpValue: xdr.StellarValue{ + CloseTime: xdr.TimePoint(ledgerCloseTime(ledgerSequence)), + }, + LedgerSeq: xdr.Uint32(ledgerSequence), + }, + }, + TxProcessing: txProcessing, + TxSet: xdr.GeneralizedTransactionSet{ + V: 1, + V1TxSet: &xdr.TransactionSetV1{ + PreviousLedgerHash: xdr.Hash{1}, + Phases: []xdr.TransactionPhase{ + { + V: 0, + V0Components: &components, + }, + }, + }, + }, + }, + } +} + +func txEnvelope(ledgerSequence uint32, feeBump bool) xdr.TransactionEnvelope { + var envelope xdr.TransactionEnvelope + var err error + if feeBump { + envelope, err = 
xdr.NewTransactionEnvelope(xdr.EnvelopeTypeEnvelopeTypeTxFeeBump, xdr.FeeBumpTransactionEnvelope{ + Tx: xdr.FeeBumpTransaction{ + Fee: 10, + FeeSource: xdr.MustMuxedAddress("MA7QYNF7SOWQ3GLR2BGMZEHXAVIRZA4KVWLTJJFC7MGXUA74P7UJVAAAAAAAAAAAAAJLK"), + InnerTx: xdr.FeeBumpTransactionInnerTx{ + Type: xdr.EnvelopeTypeEnvelopeTypeTx, + V1: &xdr.TransactionV1Envelope{ + Tx: xdr.Transaction{ + Fee: 1, + SeqNum: xdr.SequenceNumber(ledgerSequence + 90), + SourceAccount: xdr.MustMuxedAddress("MA7QYNF7SOWQ3GLR2BGMZEHXAVIRZA4KVWLTJJFC7MGXUA74P7UJVAAAAAAAAAAAAAJLK"), + }, + }, + }, + }, + }) + } else { + envelope, err = xdr.NewTransactionEnvelope(xdr.EnvelopeTypeEnvelopeTypeTx, xdr.TransactionV1Envelope{ + Tx: xdr.Transaction{ + Fee: 1, + SeqNum: xdr.SequenceNumber(ledgerSequence + 90), + SourceAccount: xdr.MustMuxedAddress("MA7QYNF7SOWQ3GLR2BGMZEHXAVIRZA4KVWLTJJFC7MGXUA74P7UJVAAAAAAAAAAAAAJLK"), + }, + }) + } + if err != nil { + panic(err) + } + return envelope +} + +func requirePresent(t *testing.T, store *MemoryStore, feeBump bool, ledgerSequence, firstSequence, lastSequence uint32) { + tx, ok, storeRange := store.GetTransaction(txHash(ledgerSequence, false)) + require.True(t, ok) + require.Equal(t, expectedTransaction(t, ledgerSequence, feeBump), tx) + require.Equal(t, expectedStoreRange(firstSequence, lastSequence), storeRange) + if feeBump { + tx, ok, storeRange = store.GetTransaction(txHash(ledgerSequence, true)) + require.True(t, ok) + require.Equal(t, expectedTransaction(t, ledgerSequence, feeBump), tx) + require.Equal(t, expectedStoreRange(firstSequence, lastSequence), storeRange) + } +} + +func TestIngestTransactions(t *testing.T) { + // Use a small retention window to test eviction + store := NewMemoryStore(interfaces.MakeNoOpDeamon(), "passphrase", 3) + + _, ok, storeRange := store.GetTransaction(txHash(1, false)) + require.False(t, ok) + require.Equal(t, StoreRange{}, storeRange) + + // Insert ledger 1 + require.NoError(t, store.IngestTransactions(txMeta(1, false))) + requirePresent(t, store, false, 1, 1, 1) + require.Len(t, store.transactions, 1) + + // Insert ledger 2 + require.NoError(t, store.IngestTransactions(txMeta(2, true))) + requirePresent(t, store, false, 1, 1, 2) + requirePresent(t, store, true, 2, 1, 2) + require.Len(t, store.transactions, 3) + + // Insert ledger 3 + require.NoError(t, store.IngestTransactions(txMeta(3, false))) + requirePresent(t, store, false, 1, 1, 3) + requirePresent(t, store, true, 2, 1, 3) + requirePresent(t, store, false, 3, 1, 3) + require.Len(t, store.transactions, 4) + + // Now we have filled the memory store + + // Insert ledger 4, which will cause the window to move and evict ledger 1 + require.NoError(t, store.IngestTransactions(txMeta(4, false))) + requirePresent(t, store, true, 2, 2, 4) + requirePresent(t, store, false, 3, 2, 4) + requirePresent(t, store, false, 4, 2, 4) + + _, ok, storeRange = store.GetTransaction(txHash(1, false)) + require.False(t, ok) + require.Equal(t, expectedStoreRange(2, 4), storeRange) + require.Equal(t, uint32(3), store.transactionsByLedger.Len()) + require.Len(t, store.transactions, 4) + + // Insert ledger 5, which will cause the window to move and evict ledger 2 + require.NoError(t, store.IngestTransactions(txMeta(5, false))) + requirePresent(t, store, false, 3, 3, 5) + requirePresent(t, store, false, 4, 3, 5) + requirePresent(t, store, false, 5, 3, 5) + + _, ok, storeRange = store.GetTransaction(txHash(2, false)) + require.False(t, ok) + require.Equal(t, expectedStoreRange(3, 5), storeRange) + require.Equal(t, 
uint32(3), store.transactionsByLedger.Len()) + require.Len(t, store.transactions, 3) + + _, ok, storeRange = store.GetTransaction(txHash(2, true)) + require.False(t, ok) + require.Equal(t, expectedStoreRange(3, 5), storeRange) + require.Equal(t, uint32(3), store.transactionsByLedger.Len()) + require.Len(t, store.transactions, 3) +} + +func stableHeapInUse() int64 { + var ( + m = runtime.MemStats{} + prevInUse uint64 + prevNumGC uint32 + ) + + for { + runtime.GC() + + // Sleeping to allow GC to run a few times and collect all temporary data. + time.Sleep(100 * time.Millisecond) + + runtime.ReadMemStats(&m) + + // Considering heap stable if recent cycle collected less than 10KB. + if prevNumGC != 0 && m.NumGC > prevNumGC && math.Abs(float64(m.HeapInuse-prevInUse)) < 10*1024 { + break + } + + prevInUse = m.HeapInuse + prevNumGC = m.NumGC + } + + return int64(m.HeapInuse) +} + +func byteCountBinary(b int64) string { + const unit = 1024 + if b < unit { + return fmt.Sprintf("%d B", b) + } + div, exp := int64(unit), 0 + for n := b / unit; n >= unit; n /= unit { + div *= unit + exp++ + } + return fmt.Sprintf("%.1f %ciB", float64(b)/float64(div), "KMGTPE"[exp]) +} + +func BenchmarkIngestTransactionsMemory(b *testing.B) { + roundsNumber := uint32(b.N * 100000) + // Use a small retention window to test eviction + store := NewMemoryStore(interfaces.MakeNoOpDeamon(), "passphrase", roundsNumber) + + heapSizeBefore := stableHeapInUse() + + for i := uint32(0); i < roundsNumber; i++ { + // Insert ledger i + require.NoError(b, store.IngestTransactions(txMeta(i, false))) + } + heapSizeAfter := stableHeapInUse() + b.ReportMetric(float64(heapSizeAfter), "bytes/100k_transactions") + b.Logf("Memory consumption for %d transactions %v", roundsNumber, byteCountBinary(heapSizeAfter-heapSizeBefore)) + + // we want to generate 500*20000 transactions total, to cover the expected daily amount of transactions. + projectedTransactionCount := int64(500 * 20000) + projectedMemoryUtiliztion := (heapSizeAfter - heapSizeBefore) * projectedTransactionCount / int64(roundsNumber) + b.Logf("Projected memory consumption for %d transactions %v", projectedTransactionCount, byteCountBinary(projectedMemoryUtiliztion)) + b.ReportMetric(float64(projectedMemoryUtiliztion), "bytes/10M_transactions") + + // add another call to store to prevent the GC from collecting. 
+ store.GetTransaction(xdr.Hash{}) +} diff --git a/cmd/soroban-rpc/internal/util/panicgroup.go b/cmd/soroban-rpc/internal/util/panicgroup.go new file mode 100644 index 00000000..b131e91c --- /dev/null +++ b/cmd/soroban-rpc/internal/util/panicgroup.go @@ -0,0 +1,116 @@ +package util + +import ( + "fmt" + "os" + "reflect" + "runtime" + "runtime/debug" + "strings" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stellar/go/support/log" +) + +var UnrecoverablePanicGroup = panicGroup{ + logPanicsToStdErr: true, + exitProcessOnPanic: true, +} + +var RecoverablePanicGroup = panicGroup{ + logPanicsToStdErr: true, + exitProcessOnPanic: false, +} + +type panicGroup struct { + log *log.Entry + logPanicsToStdErr bool + exitProcessOnPanic bool + panicsCounter prometheus.Counter +} + +func (pg *panicGroup) Log(log *log.Entry) *panicGroup { + return &panicGroup{ + log: log, + logPanicsToStdErr: pg.logPanicsToStdErr, + exitProcessOnPanic: pg.exitProcessOnPanic, + panicsCounter: pg.panicsCounter, + } +} + +func (pg *panicGroup) Counter(counter prometheus.Counter) *panicGroup { + return &panicGroup{ + log: pg.log, + logPanicsToStdErr: pg.logPanicsToStdErr, + exitProcessOnPanic: pg.exitProcessOnPanic, + panicsCounter: counter, + } +} + +// panicGroup give us the ability to spin a goroutine, with clear upfront definitions on what should be done in the +// case of an internal panic. +func (pg *panicGroup) Go(fn func()) { + go func() { + defer pg.recoverRoutine(fn) + fn() + }() +} + +func (pg *panicGroup) recoverRoutine(fn func()) { + recoverRes := recover() + if recoverRes == nil { + return + } + cs := getPanicCallStack(recoverRes, fn) + if len(cs) <= 0 { + return + } + if pg.log != nil { + for _, line := range cs { + pg.log.Warn(line) + } + } + if pg.logPanicsToStdErr { + for _, line := range cs { + fmt.Fprintln(os.Stderr, line) + } + } + + if pg.panicsCounter != nil { + pg.panicsCounter.Inc() + } + if pg.exitProcessOnPanic { + os.Exit(1) + } +} + +func getPanicCallStack(recoverRes any, fn func()) (outCallStack []string) { + functionName := runtime.FuncForPC(reflect.ValueOf(fn).Pointer()).Name() + return CallStack(recoverRes, functionName, "(*panicGroup).Go", 10) +} + +// CallStack returns an array of strings representing the current call stack. The method is +// tuned for the purpose of panic handler, and used as a helper in contructing the list of entries we want +// to write to the log / stderr / telemetry. +func CallStack(recoverRes any, topLevelFunctionName string, lastCallstackMethod string, unwindStackLines int) (callStack []string) { + if topLevelFunctionName != "" { + callStack = append(callStack, fmt.Sprintf("%v when calling %v", recoverRes, topLevelFunctionName)) + } else { + callStack = append(callStack, fmt.Sprintf("%v", recoverRes)) + } + // while we're within the recoverRoutine, the debug.Stack() would return the + // call stack where the panic took place. + callStackStrings := string(debug.Stack()) + for i, callStackLine := range strings.FieldsFunc(callStackStrings, func(r rune) bool { return r == '\n' || r == '\t' }) { + // skip the first (unwindStackLines) entries, since these are the "debug.Stack()" entries, which aren't really useful. + if i < unwindStackLines { + continue + } + callStack = append(callStack, callStackLine) + // once we reached the limiter entry, stop. 
+ if strings.Contains(callStackLine, lastCallstackMethod) { + break + } + } + return callStack +} diff --git a/cmd/soroban-rpc/internal/util/panicgroup_test.go b/cmd/soroban-rpc/internal/util/panicgroup_test.go new file mode 100644 index 00000000..63c42206 --- /dev/null +++ b/cmd/soroban-rpc/internal/util/panicgroup_test.go @@ -0,0 +1,111 @@ +package util + +import ( + "os" + "sync" + "testing" + "time" + + "github.com/sirupsen/logrus" + "github.com/stellar/go/support/log" + "github.com/stretchr/testify/require" +) + +func TestTrivialPanicGroup(t *testing.T) { + ch := make(chan int) + + panicGroup := panicGroup{} + panicGroup.Go(func() { ch <- 1 }) + + <-ch +} + +type TestLogsCounter struct { + entry *log.Entry + mu sync.Mutex + writtenLogEntries [logrus.TraceLevel + 1]int +} + +func makeTestLogCounter() *TestLogsCounter { + out := &TestLogsCounter{ + entry: log.New(), + } + out.entry.AddHook(out) + out.entry.SetLevel(logrus.DebugLevel) + return out +} +func (te *TestLogsCounter) Entry() *log.Entry { + return te.entry +} +func (te *TestLogsCounter) Levels() []logrus.Level { + return []logrus.Level{logrus.PanicLevel, logrus.FatalLevel, logrus.ErrorLevel, logrus.WarnLevel, logrus.InfoLevel, logrus.DebugLevel, logrus.TraceLevel} +} +func (te *TestLogsCounter) Fire(e *logrus.Entry) error { + te.mu.Lock() + defer te.mu.Unlock() + te.writtenLogEntries[e.Level]++ + return nil +} +func (te *TestLogsCounter) GetLevel(i int) int { + te.mu.Lock() + defer te.mu.Unlock() + return te.writtenLogEntries[i] +} + +func PanicingFunctionA(w *int) { + *w = 0 +} + +func IndirectPanicingFunctionB() { + PanicingFunctionA(nil) +} + +func IndirectPanicingFunctionC() { + IndirectPanicingFunctionB() +} + +func TestPanicGroupLog(t *testing.T) { + logCounter := makeTestLogCounter() + panicGroup := panicGroup{ + log: logCounter.Entry(), + } + panicGroup.Go(IndirectPanicingFunctionC) + // wait until we get all the log entries. + waitStarted := time.Now() + for time.Since(waitStarted) < 5*time.Second { + warningCount := logCounter.GetLevel(3) + if warningCount >= 9 { + return + } + time.Sleep(1 * time.Millisecond) + } + t.FailNow() +} + +func TestPanicGroupStdErr(t *testing.T) { + tmpFile, err := os.CreateTemp("", "TestPanicGroupStdErr") + require.NoError(t, err) + defaultStdErr := os.Stderr + os.Stderr = tmpFile + defer func() { + os.Stderr = defaultStdErr + tmpFile.Close() + os.Remove(tmpFile.Name()) + }() + + panicGroup := panicGroup{ + logPanicsToStdErr: true, + } + panicGroup.Go(IndirectPanicingFunctionC) + // wait until we get all the log entries. + waitStarted := time.Now() + for time.Since(waitStarted) < 5*time.Second { + outErrBytes, err := os.ReadFile(tmpFile.Name()) + require.NoError(t, err) + if len(outErrBytes) >= 100 { + return + } + time.Sleep(1 * time.Millisecond) + } + t.FailNow() +} diff --git a/cmd/soroban-rpc/lib/preflight.h b/cmd/soroban-rpc/lib/preflight.h new file mode 100644 index 00000000..81db0c54 --- /dev/null +++ b/cmd/soroban-rpc/lib/preflight.h @@ -0,0 +1,65 @@ +// NOTE: You could use https://michael-f-bryan.github.io/rust-ffi-guide/cbindgen.html to generate +// this header automatically from your Rust code. But for now, we'll just write it by hand. 
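Stepping back to the panicGroup utility above, a minimal usage sketch (not part of this change); the background work is hypothetical, and the logger and Prometheus counter are assumed to be registered elsewhere:

```go
// Hypothetical usage of the panicGroup helpers defined above.
package main

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/stellar/go/support/log"

	"github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/util"
)

func main() {
	logger := log.New()
	panics := prometheus.NewCounter(prometheus.CounterOpts{Name: "background_panics_total"})

	// A panic inside the goroutine is logged, written to stderr, counted,
	// and terminates the process, because the unrecoverable group sets
	// exitProcessOnPanic.
	util.UnrecoverablePanicGroup.Log(logger).Counter(panics).Go(func() {
		for {
			// hypothetical long-running background work
			time.Sleep(time.Second)
		}
	})

	select {} // keep the example process alive
}
```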
+ +#include +#include + +typedef struct ledger_info_t { + uint32_t protocol_version; + uint32_t sequence_number; + uint64_t timestamp; + const char *network_passphrase; + uint32_t base_reserve; + uint32_t min_temp_entry_ttl; + uint32_t min_persistent_entry_ttl; + uint32_t max_entry_ttl; +} ledger_info_t; + +typedef struct xdr_t { + unsigned char *xdr; + size_t len; +} xdr_t; + +typedef struct xdr_vector_t { + xdr_t *array; + size_t len; +} xdr_vector_t; + +typedef struct resource_config_t { + uint64_t instruction_leeway; // Allow this many extra instructions when budgeting +} resource_config_t; + +typedef struct preflight_result_t { + char *error; // Error string in case of error, otherwise null + xdr_vector_t auth; // array of SorobanAuthorizationEntries + xdr_t result; // XDR SCVal + xdr_t transaction_data; + int64_t min_fee; // Minimum recommended resource fee + xdr_vector_t events; // array of XDR DiagnosticEvents + uint64_t cpu_instructions; + uint64_t memory_bytes; + xdr_t pre_restore_transaction_data; // SorobanTransactionData XDR for a prerequired RestoreFootprint operation + int64_t pre_restore_min_fee; // Minimum recommended resource fee for a prerequired RestoreFootprint operation +} preflight_result_t; + +preflight_result_t *preflight_invoke_hf_op(uintptr_t handle, // Go Handle to forward to SnapshotSourceGet + uint64_t bucket_list_size, // Bucket list size of current ledger + const xdr_t invoke_hf_op, // InvokeHostFunctionOp XDR + const xdr_t source_account, // AccountId XDR + const ledger_info_t ledger_info, + const resource_config_t resource_config, + bool enable_debug); + +preflight_result_t *preflight_footprint_ttl_op(uintptr_t handle, // Go Handle to forward to SnapshotSourceGet + uint64_t bucket_list_size, // Bucket list size of current ledger + const xdr_t op_body, // OperationBody XDR + const xdr_t footprint, // LedgerFootprint XDR + uint32_t current_ledger_seq); // Current ledger sequence + + +// LedgerKey XDR to LedgerEntry XDR +extern xdr_t SnapshotSourceGet(uintptr_t handle, xdr_t ledger_key); + +void free_preflight_result(preflight_result_t *result); + +extern void FreeGoXDR(xdr_t xdr); diff --git a/cmd/soroban-rpc/lib/preflight/Cargo.toml b/cmd/soroban-rpc/lib/preflight/Cargo.toml new file mode 100644 index 00000000..6bdd9ba8 --- /dev/null +++ b/cmd/soroban-rpc/lib/preflight/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "preflight" +version = "20.2.0" +publish = false + +[lib] +crate-type = ["staticlib"] + +[dependencies] +base64 = { workspace = true } +libc = "0.2.147" +sha2 = { workspace = true } +# we need the testutils feature in order to get backtraces in the preflight library +# when soroban rpc is configured to run with --preflight-enable-debug +soroban-env-host = { workspace = true, features = ["recording_auth", "testutils"]} +soroban-simulation = { workspace = true } diff --git a/cmd/soroban-rpc/lib/preflight/src/lib.rs b/cmd/soroban-rpc/lib/preflight/src/lib.rs new file mode 100644 index 00000000..25746e74 --- /dev/null +++ b/cmd/soroban-rpc/lib/preflight/src/lib.rs @@ -0,0 +1,460 @@ +extern crate base64; +extern crate libc; +extern crate sha2; +extern crate soroban_env_host; +extern crate soroban_simulation; + +use sha2::{Digest, Sha256}; +use soroban_env_host::xdr::{ + AccountId, Hash, InvokeHostFunctionOp, LedgerEntry, LedgerEntryData, LedgerFootprint, + LedgerKey, LedgerKeyTtl, Limits, OperationBody, ReadXdr, TtlEntry, WriteXdr, +}; +use soroban_env_host::LedgerInfo; +use soroban_simulation::{ledger_storage, ResourceConfig}; +use 
soroban_simulation::{ + simulate_footprint_ttl_op, simulate_invoke_hf_op, LedgerStorage, SimulationResult, +}; +use std::error::Error; +use std::ffi::{CStr, CString}; +use std::panic; +use std::ptr::null_mut; +use std::{mem, slice}; + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct CLedgerInfo { + pub protocol_version: u32, + pub sequence_number: u32, + pub timestamp: u64, + pub network_passphrase: *const libc::c_char, + pub base_reserve: u32, + pub min_temp_entry_ttl: u32, + pub min_persistent_entry_ttl: u32, + pub max_entry_ttl: u32, +} + +impl From<CLedgerInfo> for LedgerInfo { + fn from(c: CLedgerInfo) -> Self { + let network_passphrase = from_c_string(c.network_passphrase); + Self { + protocol_version: c.protocol_version, + sequence_number: c.sequence_number, + timestamp: c.timestamp, + network_id: Sha256::digest(network_passphrase).into(), + base_reserve: c.base_reserve, + min_temp_entry_ttl: c.min_temp_entry_ttl, + min_persistent_entry_ttl: c.min_persistent_entry_ttl, + max_entry_ttl: c.max_entry_ttl, + } + } +} + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct CXDR { + pub xdr: *mut libc::c_uchar, + pub len: libc::size_t, +} + +// It would be nicer to derive Default, but we can't. It errors with: +// The trait bound `*mut u8: std::default::Default` is not satisfied +fn get_default_c_xdr() -> CXDR { + CXDR { + xdr: null_mut(), + len: 0, + } +} + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct CXDRVector { + pub array: *mut CXDR, + pub len: libc::size_t, +} + +fn get_default_c_xdr_vector() -> CXDRVector { + CXDRVector { + array: null_mut(), + len: 0, + } +} + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct CResourceConfig { + pub instruction_leeway: u64, +} + +impl From<CResourceConfig> for ResourceConfig { + fn from(r: CResourceConfig) -> Self { + return ResourceConfig { + instruction_leeway: r.instruction_leeway, + }; + } +} + +#[repr(C)] +#[derive(Copy, Clone)] +pub struct CPreflightResult { + // Error string in case of error, otherwise null + pub error: *mut libc::c_char, + // Error string in case of error, otherwise null + pub auth: CXDRVector, + // XDR SCVal + pub result: CXDR, + // SorobanTransactionData XDR + pub transaction_data: CXDR, + // Minimum recommended resource fee + pub min_fee: i64, + // array of XDR ContractEvents + pub events: CXDRVector, + pub cpu_instructions: u64, + pub memory_bytes: u64, + // SorobanTransactionData XDR for a prerequired RestoreFootprint operation + pub pre_restore_transaction_data: CXDR, + // Minimum recommended resource fee for a prerequired RestoreFootprint operation + pub pre_restore_min_fee: i64, +} + +impl From<SimulationResult> for CPreflightResult { + fn from(s: SimulationResult) -> Self { + let mut result = Self { + error: string_to_c(s.error), + auth: xdr_vec_to_c(s.auth), + result: option_xdr_to_c(s.result), + transaction_data: option_xdr_to_c(s.transaction_data), + min_fee: s.min_fee, + events: xdr_vec_to_c(s.events), + cpu_instructions: s.cpu_instructions, + memory_bytes: s.memory_bytes, + pre_restore_transaction_data: get_default_c_xdr(), + pre_restore_min_fee: 0, + }; + if let Some(p) = s.restore_preamble { + result.pre_restore_min_fee = p.min_fee; + result.pre_restore_transaction_data = xdr_to_c(p.transaction_data); + }; + result + } +} + +#[no_mangle] +pub extern "C" fn preflight_invoke_hf_op( + handle: libc::uintptr_t, // Go Handle to forward to SnapshotSourceGet and SnapshotSourceHas + bucket_list_size: u64, // Bucket list size for current ledger + invoke_hf_op: CXDR, // InvokeHostFunctionOp XDR in base64 + source_account: CXDR, // AccountId XDR in base64 + ledger_info:
CLedgerInfo, + resource_config: CResourceConfig, + enable_debug: bool, +) -> *mut CPreflightResult { + catch_preflight_panic(Box::new(move || { + preflight_invoke_hf_op_or_maybe_panic( + handle, + bucket_list_size, + invoke_hf_op, + source_account, + ledger_info, + resource_config, + enable_debug, + ) + })) +} + +fn preflight_invoke_hf_op_or_maybe_panic( + handle: libc::uintptr_t, + bucket_list_size: u64, // Go Handle to forward to SnapshotSourceGet and SnapshotSourceHas + invoke_hf_op: CXDR, // InvokeHostFunctionOp XDR in base64 + source_account: CXDR, // AccountId XDR in base64 + ledger_info: CLedgerInfo, + resource_config: CResourceConfig, + enable_debug: bool, +) -> Result<CPreflightResult, Box<dyn Error>> { + let invoke_hf_op = + InvokeHostFunctionOp::from_xdr(from_c_xdr(invoke_hf_op), Limits::none()).unwrap(); + let source_account = AccountId::from_xdr(from_c_xdr(source_account), Limits::none()).unwrap(); + let go_storage = GoLedgerStorage { + golang_handle: handle, + current_ledger_sequence: ledger_info.sequence_number, + }; + let ledger_storage = + LedgerStorage::with_restore_tracking(Box::new(go_storage), ledger_info.sequence_number)?; + let result = simulate_invoke_hf_op( + ledger_storage, + bucket_list_size, + invoke_hf_op, + source_account, + LedgerInfo::from(ledger_info), + resource_config.into(), + enable_debug, + ); + match result { + Ok(r) => Ok(r.into()), + Err(e) => Err(e), + } +} + +#[no_mangle] +pub extern "C" fn preflight_footprint_ttl_op( + handle: libc::uintptr_t, // Go Handle to forward to SnapshotSourceGet and SnapshotSourceHas + bucket_list_size: u64, // Bucket list size for current ledger + op_body: CXDR, // OperationBody XDR + footprint: CXDR, // LedgerFootprint XDR + current_ledger_seq: u32, +) -> *mut CPreflightResult { + catch_preflight_panic(Box::new(move || { + preflight_footprint_ttl_op_or_maybe_panic( + handle, + bucket_list_size, + op_body, + footprint, + current_ledger_seq, + ) + })) +} + +fn preflight_footprint_ttl_op_or_maybe_panic( + handle: libc::uintptr_t, + bucket_list_size: u64, + op_body: CXDR, + footprint: CXDR, + current_ledger_seq: u32, +) -> Result<CPreflightResult, Box<dyn Error>> { + let op_body = OperationBody::from_xdr(from_c_xdr(op_body), Limits::none()).unwrap(); + let footprint = LedgerFootprint::from_xdr(from_c_xdr(footprint), Limits::none()).unwrap(); + let go_storage = GoLedgerStorage { + golang_handle: handle, + current_ledger_sequence: current_ledger_seq, + }; + let ledger_storage = &LedgerStorage::new(Box::new(go_storage), current_ledger_seq); + let result = simulate_footprint_ttl_op( + ledger_storage, + bucket_list_size, + op_body, + footprint, + current_ledger_seq, + ); + match result { + Ok(r) => Ok(r.into()), + Err(e) => Err(e), + } +} + +fn preflight_error(str: String) -> CPreflightResult { + let c_str = CString::new(str).unwrap(); + CPreflightResult { + error: c_str.into_raw(), + auth: get_default_c_xdr_vector(), + result: get_default_c_xdr(), + transaction_data: get_default_c_xdr(), + min_fee: 0, + events: get_default_c_xdr_vector(), + cpu_instructions: 0, + memory_bytes: 0, + pre_restore_transaction_data: get_default_c_xdr(), + pre_restore_min_fee: 0, + } +} + +fn catch_preflight_panic( + op: Box<dyn Fn() -> Result<CPreflightResult, Box<dyn Error>>>, +) -> *mut CPreflightResult { + // catch panics before they reach foreign callers (which otherwise would result in + // undefined behavior) + let res: std::thread::Result<Result<CPreflightResult, Box<dyn Error>>> = + panic::catch_unwind(panic::AssertUnwindSafe(op)); + let c_preflight_result = match res { + Err(panic) => match panic.downcast::<String>() { + Ok(panic_msg) => preflight_error(format!("panic during preflight() call: 
{panic_msg}")), + Err(_) => preflight_error("panic during preflight() call: unknown cause".to_string()), + }, + // See https://docs.rs/anyhow/latest/anyhow/struct.Error.html#display-representations + Ok(r) => r.unwrap_or_else(|e| preflight_error(format!("{e:?}"))), + }; + // transfer ownership to caller + // caller needs to invoke free_preflight_result(result) when done + Box::into_raw(Box::new(c_preflight_result)) +} + +fn xdr_to_c(v: impl WriteXdr) -> CXDR { + let (xdr, len) = vec_to_c_array(v.to_xdr(Limits::none()).unwrap()); + CXDR { xdr, len } +} + +fn option_xdr_to_c(v: Option) -> CXDR { + v.map_or( + CXDR { + xdr: null_mut(), + len: 0, + }, + xdr_to_c, + ) +} + +fn xdr_vec_to_c(v: Vec) -> CXDRVector { + let c_v = v.into_iter().map(xdr_to_c).collect(); + let (array, len) = vec_to_c_array(c_v); + CXDRVector { array, len } +} + +fn string_to_c(str: String) -> *mut libc::c_char { + CString::new(str).unwrap().into_raw() +} + +fn vec_to_c_array(mut v: Vec) -> (*mut T, libc::size_t) { + // Make sure length and capacity are the same + // (this allows using the length as the capacity when deallocating the vector) + v.shrink_to_fit(); + let len = v.len(); + assert_eq!(len, v.capacity()); + + // Get the pointer to our vector, we will deallocate it in free_c_null_terminated_char_array() + // TODO: replace by `out_vec.into_raw_parts()` once the API stabilizes + let ptr = v.as_mut_ptr(); + mem::forget(v); + + (ptr, len) +} + +/// . +/// +/// # Safety +/// +/// . +#[no_mangle] +pub unsafe extern "C" fn free_preflight_result(result: *mut CPreflightResult) { + if result.is_null() { + return; + } + let boxed = Box::from_raw(result); + free_c_string(boxed.error); + free_c_xdr_array(boxed.auth); + free_c_xdr(boxed.result); + free_c_xdr(boxed.transaction_data); + free_c_xdr_array(boxed.events); + free_c_xdr(boxed.pre_restore_transaction_data); +} + +fn free_c_string(str: *mut libc::c_char) { + if str.is_null() { + return; + } + unsafe { + _ = CString::from_raw(str); + } +} + +fn free_c_xdr(xdr: CXDR) { + if xdr.xdr.is_null() { + return; + } + unsafe { + let _ = Vec::from_raw_parts(xdr.xdr, xdr.len, xdr.len); + } +} + +fn free_c_xdr_array(xdr_array: CXDRVector) { + if xdr_array.array.is_null() { + return; + } + unsafe { + let v = Vec::from_raw_parts(xdr_array.array, xdr_array.len, xdr_array.len); + for xdr in v { + free_c_xdr(xdr); + } + } +} + +fn from_c_string(str: *const libc::c_char) -> String { + let c_str = unsafe { CStr::from_ptr(str) }; + c_str.to_str().unwrap().to_string() +} + +fn from_c_xdr(xdr: CXDR) -> Vec { + let s = unsafe { slice::from_raw_parts(xdr.xdr, xdr.len) }; + s.to_vec() +} + +// Functions imported from Golang +extern "C" { + // Free Strings returned from Go functions + fn FreeGoXDR(xdr: CXDR); + // LedgerKey XDR in base64 string to LedgerEntry XDR in base64 string + fn SnapshotSourceGet(handle: libc::uintptr_t, ledger_key: CXDR) -> CXDR; +} + +struct GoLedgerStorage { + golang_handle: libc::uintptr_t, + current_ledger_sequence: u32, +} + +impl GoLedgerStorage { + // Get the XDR, regardless of ttl + fn get_xdr_internal( + &self, + key_xdr: &mut Vec, + ) -> std::result::Result, ledger_storage::Error> { + let key_c_xdr = CXDR { + xdr: key_xdr.as_mut_ptr(), + len: key_xdr.len(), + }; + let res = unsafe { SnapshotSourceGet(self.golang_handle, key_c_xdr) }; + if res.xdr.is_null() { + return Err(ledger_storage::Error::NotFound); + } + let v = from_c_xdr(res); + unsafe { FreeGoXDR(res) }; + Ok(v) + } +} + +impl ledger_storage::LedgerGetter for GoLedgerStorage { + fn get( + &self, + key: 
&LedgerKey, + include_not_live: bool, + ) -> std::result::Result<(LedgerEntry, Option), ledger_storage::Error> { + let mut key_xdr = key.to_xdr(Limits::none())?; + let xdr = self.get_xdr_internal(&mut key_xdr)?; + + let live_until_ledger_seq = match key { + // TODO: it would probably be more efficient to do all of this in the Go side + // (e.g. it would allow us to query multiple entries at once) + LedgerKey::ContractData(_) | LedgerKey::ContractCode(_) => { + let key_hash: [u8; 32] = Sha256::digest(key_xdr).into(); + let ttl_key = LedgerKey::Ttl(LedgerKeyTtl { + key_hash: Hash(key_hash), + }); + let mut ttl_key_xdr = ttl_key.to_xdr(Limits::none())?; + let ttl_entry_xdr = self.get_xdr_internal(&mut ttl_key_xdr)?; + let ttl_entry = LedgerEntry::from_xdr(ttl_entry_xdr, Limits::none())?; + if let LedgerEntryData::Ttl(TtlEntry { + live_until_ledger_seq, + .. + }) = ttl_entry.data + { + Some(live_until_ledger_seq) + } else { + return Err(ledger_storage::Error::UnexpectedLedgerEntryTypeForTtlKey { + ledger_entry_type: ttl_entry.data.name().to_string(), + }); + } + } + _ => None, + }; + + if !include_not_live + && live_until_ledger_seq.is_some() + && !is_live(live_until_ledger_seq.unwrap(), self.current_ledger_sequence) + { + return Err(ledger_storage::Error::NotLive); + } + + let entry = LedgerEntry::from_xdr(xdr, Limits::none())?; + Ok((entry, live_until_ledger_seq)) + } +} + +pub(crate) fn is_live(live_until_ledger_seq: u32, current_ledger_seq: u32) -> bool { + live_until_ledger_seq >= current_ledger_seq +} diff --git a/cmd/soroban-rpc/main.go b/cmd/soroban-rpc/main.go new file mode 100644 index 00000000..130ea78d --- /dev/null +++ b/cmd/soroban-rpc/main.go @@ -0,0 +1,86 @@ +package main + +import ( + "fmt" + "os" + + "github.com/spf13/cobra" + goxdr "github.com/stellar/go/xdr" + + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/config" + "github.com/stellar/soroban-tools/cmd/soroban-rpc/internal/daemon" +) + +func main() { + var cfg config.Config + + rootCmd := &cobra.Command{ + Use: "soroban-rpc", + Short: "Start the remote soroban-rpc server", + Run: func(_ *cobra.Command, _ []string) { + if err := cfg.SetValues(os.LookupEnv); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + if err := cfg.Validate(); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + daemon.MustNew(&cfg).Run() + }, + } + + versionCmd := &cobra.Command{ + Use: "version", + Short: "Print version information and exit", + Run: func(_ *cobra.Command, _ []string) { + if config.CommitHash == "" { + fmt.Printf("soroban-rpc dev\n") + } else { + // avoid printing the branch for the main branch + // ( since that's what the end-user would typically have ) + // but keep it for internal build ( so that we'll know from which branch it + // was built ) + branch := config.Branch + if branch == "main" { + branch = "" + } + fmt.Printf("soroban-rpc %s (%s) %s\n", config.Version, config.CommitHash, branch) + } + fmt.Printf("stellar-xdr %s\n", goxdr.CommitHash) + }, + } + + genConfigFileCmd := &cobra.Command{ + Use: "gen-config-file", + Short: "Generate a config file with default settings", + Run: func(_ *cobra.Command, _ []string) { + // We can't call 'Validate' here because the config file we are + // generating might not be complete. e.g. It might not include a network passphrase. 
+ if err := cfg.SetValues(os.LookupEnv); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + out, err := cfg.MarshalTOML() + if err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + fmt.Println(string(out)) + }, + } + + rootCmd.AddCommand(versionCmd) + rootCmd.AddCommand(genConfigFileCmd) + + if err := cfg.AddFlags(rootCmd); err != nil { + fmt.Fprintf(os.Stderr, "could not parse config options: %v\n", err) + os.Exit(1) + } + + if err := rootCmd.Execute(); err != nil { + fmt.Fprintf(os.Stderr, "could not run: %v\n", err) + + os.Exit(1) + } +} diff --git a/docs/MONITORING.md b/docs/MONITORING.md new file mode 100644 index 00000000..2a511198 --- /dev/null +++ b/docs/MONITORING.md @@ -0,0 +1,61 @@ +# Monitoring and Tuning Guide for soroban-rpc + +## Introduction + +This document provides a comprehensive guide to monitoring and tuning soroban-rpc, a backend server that communicates using the jrpc (JSON-RPC) protocol over HTTP. To ensure high +availability, high performance, and efficient resource utilization, soroban-rpc incorporates various features like limiting concurrent requests, controlling execution times, and providing +warning and limiting mechanisms. This guide aims to help operators effectively monitor the server, detect potential issues, and apply tuning strategies to maintain optimal performance. + +## Monitoring Metrics + +To ensure the smooth operation of soroban-rpc, several key metrics should be monitored continuously: + +1. **Global Inflight Requests (Concurrent HTTP Requests)**: Monitor the number of concurrent HTTP requests being enqueued at the HTTP endpoint. This metric is tracked via the + `global_inflight_requests` gauge. If this number reaches the predefined limit, an HTTP 503 error is generated. This metric helps identify if the server is reaching its limit in handling + incoming requests. + +2. **Method-specific Inflight Requests (Concurrent JRPC Requests)**: Track the number of concurrent JRPC requests for each method using the `_inflight_requests` gauge. This + allows you to limit the workload of specific methods in case the server runs out of resources. + +3. **HTTP Request Duration**: Monitor the duration taken to process each HTTP request. This metric helps identify if any requests are taking too long to process and may lead to potential + performance issues. If the duration limit is reached, an HTTP 504 error is generated. The total number of warnings generated is tracked by the + `global_request_execution_duration_threshold_warning` counter, and the number of terminated methods is tracked via the `global_request_execution_duration_threshold_limit` counter. + +4. **Method-specific Execution Warnings and Limits**: Measure the execution time of each method and compare it against the predefined threshold. Track the execution warnings using the + `_execution_threshold_warning` counter and the execution limits using the `_execution_threshold_limit` counter. These metrics help operators identify + slow-performing methods and set execution limits to prevent resource exhaustion. + +## Best Practices + +Follow these best practices to maintain a stable and performant soroban-rpc deployment: + +1. **Set Sensible Limits**: Determine appropriate limits for concurrent requests, method execution times, and HTTP request duration based on your server's resources and expected workload. + Avoid overly restrictive limits that may hinder normal operations. + +2. 
**Logging and Alerts**: soroban-rpc ships with logging and a metrics endpoint that report its operational status. Build tooling on your side, such as Grafana alerts, log scraping, or
+   similar, so that these events reach you promptly.
+
+3. **Load Testing**: Regularly conduct load testing to assess the server's performance under varying workloads. Use this data to adjust limits and execution times as needed.
+
+4. **Scaling Strategies**: Plan scaling strategies for both vertical and horizontal scaling. Vertical scaling involves upgrading hardware resources like CPU, memory, and disk, while
+   horizontal scaling uses HTTP-aware load balancers to distribute the load across multiple machines.
+
+## Tuning Suggestions
+
+When monitoring resource utilization and identifying gradual increases in method execution times, consider the following tuning suggestions:
+
+1. **Vertical Tuning**:
+
+   - Increase CPU resources: Faster processors can reduce method execution times, improving overall performance.
+   - Add Memory: Sufficient memory helps reduce disk I/O and can optimize processing times.
+   - Use Faster Disk: SSDs or faster disk technologies can significantly improve I/O performance.
+
+2. **Horizontal Tuning**:
+
+   - Employ HTTP-Aware Load Balancers: Use load balancers that are aware of HTTP error codes and response times. This enables effective distribution of requests across multiple instances
+     while considering their respective loads and response times.
+
+3. **Quantitative Tuning**:
+   - Adjust Concurrency Levels: Fine-tune the concurrency limits for specific methods based on their individual resource requirements and importance. This allows you to prioritize critical
+     methods and prevent resource contention.
+   - Limit Execution Times: Set appropriate execution time limits for methods, ensuring that no single method consumes excessive resources.
+   - Divide and Conquer: Create several service performance groups, allowing a subset of users to receive more favorable method execution times.
diff --git a/docs/README.md b/docs/README.md
new file mode 100644
index 00000000..ce8ce2e1
--- /dev/null
+++ b/docs/README.md
@@ -0,0 +1,3 @@
+# Auto-generated docs
+
+The docs in this folder are auto-generated. Do not edit them manually or your changes will be lost!
diff --git a/docs/soroban-cli-full-docs.md b/docs/soroban-cli-full-docs.md
new file mode 100644
index 00000000..3546594c
--- /dev/null
+++ b/docs/soroban-cli-full-docs.md
@@ -0,0 +1,1326 @@
+# Command-Line Help for `soroban`
+
+This document contains the help content for the `soroban` command-line program.
+ +**Command Overview:** + +* [`soroban`↴](#soroban) +* [`soroban completion`↴](#soroban-completion) +* [`soroban config`↴](#soroban-config) +* [`soroban config network`↴](#soroban-config-network) +* [`soroban config network add`↴](#soroban-config-network-add) +* [`soroban config network rm`↴](#soroban-config-network-rm) +* [`soroban config network ls`↴](#soroban-config-network-ls) +* [`soroban config identity`↴](#soroban-config-identity) +* [`soroban config identity add`↴](#soroban-config-identity-add) +* [`soroban config identity address`↴](#soroban-config-identity-address) +* [`soroban config identity fund`↴](#soroban-config-identity-fund) +* [`soroban config identity generate`↴](#soroban-config-identity-generate) +* [`soroban config identity ls`↴](#soroban-config-identity-ls) +* [`soroban config identity rm`↴](#soroban-config-identity-rm) +* [`soroban config identity show`↴](#soroban-config-identity-show) +* [`soroban contract`↴](#soroban-contract) +* [`soroban contract asset`↴](#soroban-contract-asset) +* [`soroban contract asset id`↴](#soroban-contract-asset-id) +* [`soroban contract asset deploy`↴](#soroban-contract-asset-deploy) +* [`soroban contract bindings`↴](#soroban-contract-bindings) +* [`soroban contract bindings json`↴](#soroban-contract-bindings-json) +* [`soroban contract bindings rust`↴](#soroban-contract-bindings-rust) +* [`soroban contract bindings typescript`↴](#soroban-contract-bindings-typescript) +* [`soroban contract build`↴](#soroban-contract-build) +* [`soroban contract extend`↴](#soroban-contract-extend) +* [`soroban contract deploy`↴](#soroban-contract-deploy) +* [`soroban contract fetch`↴](#soroban-contract-fetch) +* [`soroban contract id`↴](#soroban-contract-id) +* [`soroban contract id asset`↴](#soroban-contract-id-asset) +* [`soroban contract id wasm`↴](#soroban-contract-id-wasm) +* [`soroban contract inspect`↴](#soroban-contract-inspect) +* [`soroban contract install`↴](#soroban-contract-install) +* [`soroban contract invoke`↴](#soroban-contract-invoke) +* [`soroban contract optimize`↴](#soroban-contract-optimize) +* [`soroban contract read`↴](#soroban-contract-read) +* [`soroban contract restore`↴](#soroban-contract-restore) +* [`soroban events`↴](#soroban-events) +* [`soroban keys`↴](#soroban-keys) +* [`soroban keys add`↴](#soroban-keys-add) +* [`soroban keys address`↴](#soroban-keys-address) +* [`soroban keys fund`↴](#soroban-keys-fund) +* [`soroban keys generate`↴](#soroban-keys-generate) +* [`soroban keys ls`↴](#soroban-keys-ls) +* [`soroban keys rm`↴](#soroban-keys-rm) +* [`soroban keys show`↴](#soroban-keys-show) +* [`soroban lab`↴](#soroban-lab) +* [`soroban lab token`↴](#soroban-lab-token) +* [`soroban lab token wrap`↴](#soroban-lab-token-wrap) +* [`soroban lab token id`↴](#soroban-lab-token-id) +* [`soroban lab xdr`↴](#soroban-lab-xdr) +* [`soroban lab xdr types`↴](#soroban-lab-xdr-types) +* [`soroban lab xdr types list`↴](#soroban-lab-xdr-types-list) +* [`soroban lab xdr guess`↴](#soroban-lab-xdr-guess) +* [`soroban lab xdr decode`↴](#soroban-lab-xdr-decode) +* [`soroban lab xdr encode`↴](#soroban-lab-xdr-encode) +* [`soroban lab xdr version`↴](#soroban-lab-xdr-version) +* [`soroban network`↴](#soroban-network) +* [`soroban network add`↴](#soroban-network-add) +* [`soroban network rm`↴](#soroban-network-rm) +* [`soroban network ls`↴](#soroban-network-ls) +* [`soroban version`↴](#soroban-version) + +## `soroban` + +Build, deploy, & interact with contracts; set identities to sign with; configure networks; generate keys; and more. 
+ +Intro: https://soroban.stellar.org +CLI Reference: https://github.com/stellar/soroban-tools/tree/main/docs/soroban-cli-full-docs.md + +The easiest way to get started is to generate a new identity: + + soroban config identity generate alice + +You can use identities with the `--source` flag in other commands later. + +Commands that relate to smart contract interactions are organized under the `contract` subcommand. List them: + + soroban contract --help + +A Soroban contract has its interface schema types embedded in the binary that gets deployed on-chain, making it possible to dynamically generate a custom CLI for each. `soroban contract invoke` makes use of this: + + soroban contract invoke --id CCR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OTE2 --source alice --network testnet -- --help + +Anything after the `--` double dash (the "slop") is parsed as arguments to the contract-specific CLI, generated on-the-fly from the embedded schema. For the hello world example, with a function called `hello` that takes one string argument `to`, here's how you invoke it: + + soroban contract invoke --id CCR6QKTWZQYW6YUJ7UP7XXZRLWQPFRV6SWBLQS4ZQOSAF4BOUD77OTE2 --source alice --network testnet -- hello --to world + +Full CLI reference: https://github.com/stellar/soroban-tools/tree/main/docs/soroban-cli-full-docs.md + +**Usage:** `soroban [OPTIONS] ` + +###### **Subcommands:** + +* `completion` — Print shell completion code for the specified shell +* `config` — Deprecated, use `soroban keys` and `soroban network` instead +* `contract` — Tools for smart contract developers +* `events` — Watch the network for contract events +* `keys` — Create and manage identities including keys and addresses +* `lab` — Experiment with early features and expert tools +* `network` — Start and configure networks +* `version` — Print version information + +###### **Options:** + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `-f`, `--filter-logs ` — Filter logs output. To turn on "soroban_cli::log::footprint=debug" or off "=off". Can also use env var `RUST_LOG` +* `-q`, `--quiet` — Do not write logs to stderr including `INFO` +* `-v`, `--verbose` — Log DEBUG events +* `--very-verbose` — Log DEBUG and TRACE events +* `--list` — List installed plugins. E.g. `soroban-hello` + + + +## `soroban completion` + +Print shell completion code for the specified shell + +Ensure the completion package for your shell is installed, +e.g., bash-completion for bash. + +To enable autocomplete in the current bash shell, run: + source <(soroban completion --shell bash) + +To enable autocomplete permanently, run: + echo "source <(soroban completion --shell bash)" >> ~/.bashrc + +**Usage:** `soroban completion --shell ` + +###### **Options:** + +* `--shell ` — The shell type + + Possible values: `bash`, `elvish`, `fish`, `powershell`, `zsh` + + + + +## `soroban config` + +Deprecated, use `soroban keys` and `soroban network` instead + +**Usage:** `soroban config ` + +###### **Subcommands:** + +* `network` — Configure different networks. Depraecated, use `soroban network` instead +* `identity` — Identity management. Deprecated, use `soroban keys` instead + + + +## `soroban config network` + +Configure different networks. 
Depraecated, use `soroban network` instead + +**Usage:** `soroban config network ` + +###### **Subcommands:** + +* `add` — Add a new network +* `rm` — Remove a network +* `ls` — List networks + + + +## `soroban config network add` + +Add a new network + +**Usage:** `soroban config network add [OPTIONS] --rpc-url --network-passphrase ` + +###### **Arguments:** + +* `` — Name of network + +###### **Options:** + +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban config network rm` + +Remove a network + +**Usage:** `soroban config network rm [OPTIONS] ` + +###### **Arguments:** + +* `` — Network to remove + +###### **Options:** + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban config network ls` + +List networks + +**Usage:** `soroban config network ls [OPTIONS]` + +###### **Options:** + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `-l`, `--long` — Get more info about the networks + + + +## `soroban config identity` + +Identity management. Deprecated, use `soroban keys` instead + +**Usage:** `soroban config identity ` + +###### **Subcommands:** + +* `add` — Add a new identity (keypair, ledger, macOS keychain) +* `address` — Given an identity return its address (public key) +* `fund` — Fund an identity on a test network +* `generate` — Generate a new identity with a seed phrase, currently 12 words +* `ls` — List identities +* `rm` — Remove an identity +* `show` — Given an identity return its private key + + + +## `soroban config identity add` + +Add a new identity (keypair, ledger, macOS keychain) + +**Usage:** `soroban config identity add [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity + +###### **Options:** + +* `--secret-key` — Add using secret_key Can provide with SOROBAN_SECRET_KEY +* `--seed-phrase` — Add using 12 word seed phrase to generate secret_key +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban config identity address` + +Given an identity return its address (public key) + +**Usage:** `soroban config identity address [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity to lookup, default test identity used if not provided + +###### **Options:** + +* `--hd-path ` — If identity is a seed phrase use this hd path, default is 0 +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban config identity fund` + +Fund an identity on a test network + +**Usage:** `soroban config identity fund [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity to lookup, default test identity used if not provided + +###### **Options:** + +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--hd-path ` — If identity is a seed phrase use this hd path, default is 0 +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." 
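+
+For example (illustrative values: `alice` is a placeholder identity name and `testnet` a network you have already added; `soroban keys address` and `soroban keys fund` are the non-deprecated equivalents):
+
+    soroban config identity address alice
+    soroban config identity fund alice --network testnet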
+ + + +## `soroban config identity generate` + +Generate a new identity with a seed phrase, currently 12 words + +**Usage:** `soroban config identity generate [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity + +###### **Options:** + +* `--no-fund` — Do not fund address +* `--seed ` — Optional seed to use when generating seed phrase. Random otherwise +* `-s`, `--as-secret` — Output the generated identity as a secret key +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--hd-path ` — When generating a secret key, which hd_path should be used from the original seed_phrase +* `-d`, `--default-seed` — Generate the default seed phrase. Useful for testing. Equivalent to --seed 0000000000000000 +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config + + + +## `soroban config identity ls` + +List identities + +**Usage:** `soroban config identity ls [OPTIONS]` + +###### **Options:** + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `-l`, `--long` + + + +## `soroban config identity rm` + +Remove an identity + +**Usage:** `soroban config identity rm [OPTIONS] ` + +###### **Arguments:** + +* `` — Identity to remove + +###### **Options:** + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban config identity show` + +Given an identity return its private key + +**Usage:** `soroban config identity show [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity to lookup, default is test identity + +###### **Options:** + +* `--hd-path ` — If identity is a seed phrase use this hd path, default is 0 +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban contract` + +Tools for smart contract developers + +**Usage:** `soroban contract ` + +###### **Subcommands:** + +* `asset` — Utilities to deploy a Stellar Asset Contract or get its id +* `bindings` — Generate code client bindings for a contract +* `build` — Build a contract from source +* `extend` — Extend the time to live ledger of a contract-data ledger entry +* `deploy` — Deploy a wasm contract +* `fetch` — Fetch a contract's Wasm binary +* `id` — Generate the contract id for a given contract or asset +* `inspect` — Inspect a WASM file listing contract functions, meta, etc +* `install` — Install a WASM file to the ledger without creating a contract instance +* `invoke` — Invoke a contract function +* `optimize` — Optimize a WASM file +* `read` — Print the current value of a contract-data ledger entry +* `restore` — Restore an evicted value for a contract-data legder entry + + + +## `soroban contract asset` + +Utilities to deploy a Stellar Asset Contract or get its id + +**Usage:** `soroban contract asset ` + +###### **Subcommands:** + +* `id` — Get Id of builtin Soroban Asset Contract. Deprecated, use `soroban contract id asset` instead +* `deploy` — Deploy builtin Soroban Asset Contract + + + +## `soroban contract asset id` + +Get Id of builtin Soroban Asset Contract. Deprecated, use `soroban contract id asset` instead + +**Usage:** `soroban contract asset id [OPTIONS] --asset --source-account ` + +###### **Options:** + +* `--asset ` — ID of the Stellar classic asset to wrap, e.g. 
"USDC:G...5" +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban contract asset deploy` + +Deploy builtin Soroban Asset Contract + +**Usage:** `soroban contract asset deploy [OPTIONS] --asset --source-account ` + +###### **Options:** + +* `--asset ` — ID of the Stellar classic asset to wrap, e.g. "USDC:G...5" +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--fee ` — fee amount for transaction, in stroops. 1 stroop = 0.0000001 xlm + + Default value: `100` + + + +## `soroban contract bindings` + +Generate code client bindings for a contract + +**Usage:** `soroban contract bindings ` + +###### **Subcommands:** + +* `json` — Generate Json Bindings +* `rust` — Generate Rust bindings +* `typescript` — Generate a TypeScript / JavaScript package + + + +## `soroban contract bindings json` + +Generate Json Bindings + +**Usage:** `soroban contract bindings json --wasm ` + +###### **Options:** + +* `--wasm ` — Path to wasm binary + + + +## `soroban contract bindings rust` + +Generate Rust bindings + +**Usage:** `soroban contract bindings rust --wasm ` + +###### **Options:** + +* `--wasm ` — Path to wasm binary + + + +## `soroban contract bindings typescript` + +Generate a TypeScript / JavaScript package + +**Usage:** `soroban contract bindings typescript [OPTIONS] --output-dir --contract-id ` + +###### **Options:** + +* `--wasm ` — Path to optional wasm binary +* `--output-dir ` — Where to place generated project +* `--overwrite` — Whether to overwrite output directory if it already exists +* `--contract-id ` — The contract ID/address on the network +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config + + + +## `soroban contract build` + +Build a contract from source + +Builds all crates that are referenced by the cargo manifest (Cargo.toml) that have cdylib as their crate-type. Crates are built for the wasm32 target. Unless configured otherwise, crates are built with their default features and with their release profile. 
+ +To view the commands that will be executed, without executing them, use the --print-commands-only option. + +**Usage:** `soroban contract build [OPTIONS]` + +###### **Options:** + +* `--manifest-path ` — Path to Cargo.toml + + Default value: `Cargo.toml` +* `--package ` — Package to build +* `--profile ` — Build with the specified profile + + Default value: `release` +* `--features ` — Build with the list of features activated, space or comma separated +* `--all-features` — Build with the all features activated +* `--no-default-features` — Build with the default feature not activated +* `--out-dir ` — Directory to copy wasm files to +* `--print-commands-only` — Print commands to build without executing them + + + +## `soroban contract extend` + +Extend the time to live ledger of a contract-data ledger entry. + +If no keys are specified the contract itself is extended. + +**Usage:** `soroban contract extend [OPTIONS] --ledgers-to-extend --durability --source-account ` + +###### **Options:** + +* `--ledgers-to-extend ` — Number of ledgers to extend the entries +* `--ttl-ledger-only` — Only print the new Time To Live ledger +* `--id ` — Contract ID to which owns the data entries. If no keys provided the Contract's instance will be extended +* `--key ` — Storage key (symbols only) +* `--key-xdr ` — Storage key (base64-encoded XDR) +* `--wasm ` — Path to Wasm file of contract code to extend +* `--wasm-hash ` — Path to Wasm file of contract code to extend +* `--durability ` — Storage entry durability + + Possible values: + - `persistent`: + Persistent + - `temporary`: + Temporary + +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--fee ` — fee amount for transaction, in stroops. 1 stroop = 0.0000001 xlm + + Default value: `100` + + + +## `soroban contract deploy` + +Deploy a wasm contract + +**Usage:** `soroban contract deploy [OPTIONS] --source-account <--wasm |--wasm-hash >` + +###### **Options:** + +* `--wasm ` — WASM file to deploy +* `--wasm-hash ` — Hash of the already installed/deployed WASM file +* `--salt ` — Custom salt 32-byte salt for the token id +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--fee ` — fee amount for transaction, in stroops. 
1 stroop = 0.0000001 xlm + + Default value: `100` +* `-i`, `--ignore-checks` — Whether to ignore safety checks when deploying contracts + + Default value: `false` + + + +## `soroban contract fetch` + +Fetch a contract's Wasm binary + +**Usage:** `soroban contract fetch [OPTIONS] --id ` + +###### **Options:** + +* `--id ` — Contract ID to fetch +* `-o`, `--out-file ` — Where to write output otherwise stdout is used +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config + + + +## `soroban contract id` + +Generate the contract id for a given contract or asset + +**Usage:** `soroban contract id ` + +###### **Subcommands:** + +* `asset` — Deploy builtin Soroban Asset Contract +* `wasm` — Deploy normal Wasm Contract + + + +## `soroban contract id asset` + +Deploy builtin Soroban Asset Contract + +**Usage:** `soroban contract id asset [OPTIONS] --asset --source-account ` + +###### **Options:** + +* `--asset ` — ID of the Stellar classic asset to wrap, e.g. "USDC:G...5" +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban contract id wasm` + +Deploy normal Wasm Contract + +**Usage:** `soroban contract id wasm [OPTIONS] --salt --source-account ` + +###### **Options:** + +* `--salt ` — ID of the Soroban contract +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban contract inspect` + +Inspect a WASM file listing contract functions, meta, etc + +**Usage:** `soroban contract inspect [OPTIONS] --wasm ` + +###### **Options:** + +* `--wasm ` — Path to wasm binary +* `--output ` — Output just XDR in base64 + + Default value: `docs` + + Possible values: + - `xdr-base64`: + XDR of array of contract spec entries + - `xdr-base64-array`: + Array of xdr of contract spec entries + - `docs`: + Pretty print of contract spec entries + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." 
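+
+For example (the Wasm path is a placeholder for your own build output), printing a human-readable view of a contract's embedded spec:
+
+    soroban contract inspect \
+        --wasm target/wasm32-unknown-unknown/release/hello_world.wasm
+
+Passing `--output xdr-base64` instead emits the raw spec entries for further processing.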
+ + + +## `soroban contract install` + +Install a WASM file to the ledger without creating a contract instance + +**Usage:** `soroban contract install [OPTIONS] --source-account --wasm ` + +###### **Options:** + +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--fee ` — fee amount for transaction, in stroops. 1 stroop = 0.0000001 xlm + + Default value: `100` +* `--wasm ` — Path to wasm binary +* `-i`, `--ignore-checks` — Whether to ignore safety checks when deploying contracts + + Default value: `false` + + + +## `soroban contract invoke` + +Invoke a contract function + +Generates an "implicit CLI" for the specified contract on-the-fly using the contract's schema, which gets embedded into every Soroban contract. The "slop" in this command, everything after the `--`, gets passed to this implicit CLI. Get in-depth help for a given contract: + +soroban contract invoke ... -- --help + +**Usage:** `soroban contract invoke [OPTIONS] --id --source-account [-- ...]` + +###### **Arguments:** + +* `` — Function name as subcommand, then arguments for that function as `--arg-name value` + +###### **Options:** + +* `--id ` — Contract ID to invoke +* `--cost` — Output the cost execution to stderr +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--fee ` — fee amount for transaction, in stroops. 1 stroop = 0.0000001 xlm + + Default value: `100` + + + +## `soroban contract optimize` + +Optimize a WASM file + +**Usage:** `soroban contract optimize [OPTIONS] --wasm ` + +###### **Options:** + +* `--wasm ` — Path to wasm binary +* `--wasm-out ` — Path to write the optimized WASM file to (defaults to same location as --wasm with .optimized.wasm suffix) + + + +## `soroban contract read` + +Print the current value of a contract-data ledger entry + +**Usage:** `soroban contract read [OPTIONS] --durability --source-account ` + +###### **Options:** + +* `--output ` — Type of output to generate + + Default value: `string` + + Possible values: + - `string`: + String + - `json`: + Json + - `xdr`: + XDR + +* `--id ` — Contract ID to which owns the data entries. 
If no keys provided the Contract's instance will be extended +* `--key ` — Storage key (symbols only) +* `--key-xdr ` — Storage key (base64-encoded XDR) +* `--wasm ` — Path to Wasm file of contract code to extend +* `--wasm-hash ` — Path to Wasm file of contract code to extend +* `--durability ` — Storage entry durability + + Possible values: + - `persistent`: + Persistent + - `temporary`: + Temporary + +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban contract restore` + +Restore an evicted value for a contract-data legder entry. + +If no keys are specificed the contract itself is restored. + +**Usage:** `soroban contract restore [OPTIONS] --durability --source-account ` + +###### **Options:** + +* `--id ` — Contract ID to which owns the data entries. If no keys provided the Contract's instance will be extended +* `--key ` — Storage key (symbols only) +* `--key-xdr ` — Storage key (base64-encoded XDR) +* `--wasm ` — Path to Wasm file of contract code to extend +* `--wasm-hash ` — Path to Wasm file of contract code to extend +* `--durability ` — Storage entry durability + + Possible values: + - `persistent`: + Persistent + - `temporary`: + Temporary + +* `--ledgers-to-extend ` — Number of ledgers to extend the entry +* `--ttl-ledger-only` — Only print the new Time To Live ledger +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--fee ` — fee amount for transaction, in stroops. 1 stroop = 0.0000001 xlm + + Default value: `100` + + + +## `soroban events` + +Watch the network for contract events + +**Usage:** `soroban events [OPTIONS]` + +###### **Options:** + +* `--start-ledger ` — The first ledger sequence number in the range to pull events https://developers.stellar.org/docs/encyclopedia/ledger-headers#ledger-sequence +* `--cursor ` — The cursor corresponding to the start of the event range +* `--output ` — Output formatting options for event stream + + Default value: `pretty` + + Possible values: + - `pretty`: + Colorful, human-oriented console output + - `plain`: + Human-oriented console output without colors + - `json`: + JSONified console output + +* `-c`, `--count ` — The maximum number of events to display (defer to the server-defined limit) + + Default value: `10` +* `--id ` — A set of (up to 5) contract IDs to filter events on. 
This parameter can be passed multiple times, e.g. `--id C123.. --id C456..`, or passed with multiple parameters, e.g. `--id C123 C456` +* `--topic ` — A set of (up to 4) topic filters to filter event topics on. A single topic filter can contain 1-4 different segment filters, separated by commas, with an asterisk (* character) indicating a wildcard segment +* `--type ` — Specifies which type of contract events to display + + Default value: `all` + + Possible values: `all`, `contract`, `system` + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config + + + +## `soroban keys` + +Create and manage identities including keys and addresses + +**Usage:** `soroban keys ` + +###### **Subcommands:** + +* `add` — Add a new identity (keypair, ledger, macOS keychain) +* `address` — Given an identity return its address (public key) +* `fund` — Fund an identity on a test network +* `generate` — Generate a new identity with a seed phrase, currently 12 words +* `ls` — List identities +* `rm` — Remove an identity +* `show` — Given an identity return its private key + + + +## `soroban keys add` + +Add a new identity (keypair, ledger, macOS keychain) + +**Usage:** `soroban keys add [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity + +###### **Options:** + +* `--secret-key` — Add using secret_key Can provide with SOROBAN_SECRET_KEY +* `--seed-phrase` — Add using 12 word seed phrase to generate secret_key +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban keys address` + +Given an identity return its address (public key) + +**Usage:** `soroban keys address [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity to lookup, default test identity used if not provided + +###### **Options:** + +* `--hd-path ` — If identity is a seed phrase use this hd path, default is 0 +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban keys fund` + +Fund an identity on a test network + +**Usage:** `soroban keys fund [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity to lookup, default test identity used if not provided + +###### **Options:** + +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--hd-path ` — If identity is a seed phrase use this hd path, default is 0 +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban keys generate` + +Generate a new identity with a seed phrase, currently 12 words + +**Usage:** `soroban keys generate [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity + +###### **Options:** + +* `--no-fund` — Do not fund address +* `--seed ` — Optional seed to use when generating seed phrase. Random otherwise +* `-s`, `--as-secret` — Output the generated identity as a secret key +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--hd-path ` — When generating a secret key, which hd_path should be used from the original seed_phrase +* `-d`, `--default-seed` — Generate the default seed phrase. Useful for testing. 
Equivalent to --seed 0000000000000000 +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config + + + +## `soroban keys ls` + +List identities + +**Usage:** `soroban keys ls [OPTIONS]` + +###### **Options:** + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `-l`, `--long` + + + +## `soroban keys rm` + +Remove an identity + +**Usage:** `soroban keys rm [OPTIONS] ` + +###### **Arguments:** + +* `` — Identity to remove + +###### **Options:** + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban keys show` + +Given an identity return its private key + +**Usage:** `soroban keys show [OPTIONS] ` + +###### **Arguments:** + +* `` — Name of identity to lookup, default is test identity + +###### **Options:** + +* `--hd-path ` — If identity is a seed phrase use this hd path, default is 0 +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban lab` + +Experiment with early features and expert tools + +**Usage:** `soroban lab ` + +###### **Subcommands:** + +* `token` — Wrap, create, and manage token contracts +* `xdr` — Decode xdr + + + +## `soroban lab token` + +Wrap, create, and manage token contracts + +**Usage:** `soroban lab token ` + +###### **Subcommands:** + +* `wrap` — Deploy a token contract to wrap an existing Stellar classic asset for smart contract usage Deprecated, use `soroban contract deploy asset` instead +* `id` — Compute the expected contract id for the given asset Deprecated, use `soroban contract id asset` instead + + + +## `soroban lab token wrap` + +Deploy a token contract to wrap an existing Stellar classic asset for smart contract usage Deprecated, use `soroban contract deploy asset` instead + +**Usage:** `soroban lab token wrap [OPTIONS] --asset --source-account ` + +###### **Options:** + +* `--asset ` — ID of the Stellar classic asset to wrap, e.g. "USDC:G...5" +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `--fee ` — fee amount for transaction, in stroops. 1 stroop = 0.0000001 xlm + + Default value: `100` + + + +## `soroban lab token id` + +Compute the expected contract id for the given asset Deprecated, use `soroban contract id asset` instead + +**Usage:** `soroban lab token id [OPTIONS] --asset --source-account ` + +###### **Options:** + +* `--asset ` — ID of the Stellar classic asset to wrap, e.g. "USDC:G...5" +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--network ` — Name of network to use from config +* `--source-account ` — Account that signs the final transaction. Alias `source`. 
Can be an identity (--source alice), a secret key (--source SC36…), or a seed phrase (--source "kite urban…"). Default: `identity generate --default-seed` +* `--hd-path ` — If using a seed phrase, which hierarchical deterministic path to use, e.g. `m/44'/148'/{hd_path}`. Example: `--hd-path 1`. Default: `0` +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban lab xdr` + +Decode xdr + +**Usage:** `soroban lab xdr [CHANNEL] ` + +###### **Subcommands:** + +* `types` — View information about types +* `guess` — Guess the XDR type +* `decode` — Decode XDR +* `encode` — Encode XDR +* `version` — Print version information + +###### **Arguments:** + +* `` — Channel of XDR to operate on + + Default value: `+curr` + + Possible values: `+curr`, `+next` + + + + +## `soroban lab xdr types` + +View information about types + +**Usage:** `soroban lab xdr types ` + +###### **Subcommands:** + +* `list` — + + + +## `soroban lab xdr types list` + +**Usage:** `soroban lab xdr types list [OPTIONS]` + +###### **Options:** + +* `--output ` + + Default value: `plain` + + Possible values: `plain`, `json`, `json-formatted` + + + + +## `soroban lab xdr guess` + +Guess the XDR type + +**Usage:** `soroban lab xdr guess [OPTIONS] [FILE]` + +###### **Arguments:** + +* `` — File to decode, or stdin if omitted + +###### **Options:** + +* `--input ` + + Default value: `single-base64` + + Possible values: `single`, `single-base64`, `stream`, `stream-base64`, `stream-framed` + +* `--output ` + + Default value: `list` + + Possible values: `list` + +* `--certainty ` — Certainty as an arbitrary value + + Default value: `2` + + + +## `soroban lab xdr decode` + +Decode XDR + +**Usage:** `soroban lab xdr decode [OPTIONS] --type [FILES]...` + +###### **Arguments:** + +* `` — Files to decode, or stdin if omitted + +###### **Options:** + +* `--type ` — XDR type to decode +* `--input ` + + Default value: `stream-base64` + + Possible values: `single`, `single-base64`, `stream`, `stream-base64`, `stream-framed` + +* `--output ` + + Default value: `json` + + Possible values: `json`, `json-formatted` + + + + +## `soroban lab xdr encode` + +Encode XDR + +**Usage:** `soroban lab xdr encode [OPTIONS] --type [FILES]...` + +###### **Arguments:** + +* `` — Files to encode, or stdin if omitted + +###### **Options:** + +* `--type ` — XDR type to encode +* `--input ` + + Default value: `json` + + Possible values: `json` + +* `--output ` + + Default value: `single-base64` + + Possible values: `single`, `single-base64` + + + + +## `soroban lab xdr version` + +Print version information + +**Usage:** `soroban lab xdr version` + + + +## `soroban network` + +Start and configure networks + +**Usage:** `soroban network ` + +###### **Subcommands:** + +* `add` — Add a new network +* `rm` — Remove a network +* `ls` — List networks + + + +## `soroban network add` + +Add a new network + +**Usage:** `soroban network add [OPTIONS] --rpc-url --network-passphrase ` + +###### **Arguments:** + +* `` — Name of network + +###### **Options:** + +* `--rpc-url ` — RPC server endpoint +* `--network-passphrase ` — Network passphrase to sign the transaction sent to the rpc server +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." 
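+
+For example (the RPC endpoint shown is illustrative; substitute the endpoint of the network you actually use), registering the test network once so that later commands can simply pass `--network testnet`:
+
+    soroban network add --global testnet \
+        --rpc-url https://soroban-testnet.stellar.org:443 \
+        --network-passphrase "Test SDF Network ; September 2015"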
+ + + +## `soroban network rm` + +Remove a network + +**Usage:** `soroban network rm [OPTIONS] ` + +###### **Arguments:** + +* `` — Network to remove + +###### **Options:** + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." + + + +## `soroban network ls` + +List networks + +**Usage:** `soroban network ls [OPTIONS]` + +###### **Options:** + +* `--global` — Use global config +* `--config-dir ` — Location of config directory, default is "." +* `-l`, `--long` — Get more info about the networks + + + +## `soroban version` + +Print version information + +**Usage:** `soroban version` + + + +
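+
+Tying it together with the `soroban lab xdr decode` command documented above (the piped value is a placeholder for a real base64-encoded envelope):
+
+    echo "<base64-encoded TransactionEnvelope>" | \
+        soroban lab xdr decode --type TransactionEnvelope --output json-formatted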
+ + + This document was generated automatically by +
clap-markdown. + diff --git a/go.mod b/go.mod new file mode 100644 index 00000000..a8bd3bf4 --- /dev/null +++ b/go.mod @@ -0,0 +1,98 @@ +module github.com/stellar/soroban-tools + +go 1.21 + +toolchain go1.21.1 + +require ( + github.com/Masterminds/squirrel v1.5.4 + github.com/cenkalti/backoff/v4 v4.2.1 + github.com/creachadair/jrpc2 v1.1.2 + github.com/go-chi/chi v4.1.2+incompatible + github.com/go-git/go-git/v5 v5.9.0 + github.com/mattn/go-sqlite3 v1.14.17 + github.com/pelletier/go-toml v1.9.5 + github.com/prometheus/client_golang v1.17.0 + github.com/rs/cors v1.10.1 + github.com/rubenv/sql-migrate v1.5.2 + github.com/sirupsen/logrus v1.9.3 + github.com/spf13/cobra v1.7.0 + github.com/spf13/pflag v1.0.5 + github.com/stellar/go v0.0.0-20240109175136-3ca501f09055 + github.com/stretchr/testify v1.8.4 + golang.org/x/mod v0.13.0 + gotest.tools/v3 v3.5.0 +) + +require ( + dario.cat/mergo v1.0.0 // indirect + github.com/cloudflare/circl v1.3.5 // indirect + github.com/cyphar/filepath-securejoin v0.2.4 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 // indirect + github.com/pelletier/go-toml/v2 v2.1.0 // indirect + github.com/pjbgf/sha1cd v0.3.0 // indirect + github.com/sagikazarmark/locafero v0.3.0 // indirect + github.com/sagikazarmark/slog-shim v0.1.0 // indirect + github.com/skeema/knownhosts v1.2.1 // indirect + github.com/sourcegraph/conc v0.3.0 // indirect + github.com/subosito/gotenv v1.6.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + golang.org/x/tools v0.14.0 // indirect + gopkg.in/ini.v1 v1.67.0 // indirect +) + +require ( + github.com/BurntSushi/toml v1.3.2 // indirect + github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c // indirect + github.com/acomagu/bufpipe v1.0.4 // indirect + github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect + github.com/aws/aws-sdk-go v1.45.27 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/creachadair/mds v0.3.0 // indirect + github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/emirpasic/gods v1.18.1 // indirect + github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/go-errors/errors v1.5.1 // indirect + github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect + github.com/go-git/go-billy/v5 v5.5.0 // indirect + github.com/go-gorp/gorp/v3 v3.1.0 // indirect + github.com/google/go-cmp v0.6.0 // indirect + github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect + github.com/jmespath/go-jmespath v0.4.0 // indirect + github.com/jmoiron/sqlx v1.3.5 // indirect + github.com/kevinburke/ssh_config v1.2.0 // indirect + github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect + github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect + github.com/lib/pq v1.10.9 // indirect + github.com/magiconair/properties v1.8.7 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect + github.com/pkg/errors v0.9.1 // indirect + github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/prometheus/client_model v0.5.0 // indirect + github.com/prometheus/common v0.45.0 // indirect + 
github.com/prometheus/procfs v0.12.0 // indirect + github.com/segmentio/go-loggly v0.5.1-0.20171222203950-eb91657e62b2 // indirect + github.com/sergi/go-diff v1.3.1 // indirect + github.com/spf13/afero v1.10.0 // indirect + github.com/spf13/cast v1.5.1 // indirect + github.com/spf13/viper v1.17.0 // indirect + github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 // indirect + github.com/stretchr/objx v0.5.1 // indirect + github.com/xanzy/ssh-agent v0.3.3 // indirect + golang.org/x/crypto v0.14.0 // indirect + golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect + golang.org/x/net v0.17.0 // indirect + golang.org/x/sync v0.5.0 // indirect + golang.org/x/sys v0.13.0 // indirect + golang.org/x/text v0.13.0 // indirect + google.golang.org/protobuf v1.31.0 // indirect + gopkg.in/tylerb/graceful.v1 v1.2.15 // indirect + gopkg.in/warnings.v0 v0.1.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 00000000..ed68c009 --- /dev/null +++ b/go.sum @@ -0,0 +1,756 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod 
h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= +github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Masterminds/squirrel v1.5.4 h1:uUcX/aBc8O7Fg9kaISIUsHXdKuqehiXAMQTYX8afzqM= +github.com/Masterminds/squirrel v1.5.4/go.mod h1:NNaOrjSoIDfDA40n7sr2tPNZRfjzjA400rg+riTZj10= +github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= +github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= +github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c h1:kMFnB0vCcX7IL/m9Y5LO+KQYv+t1CQOiFe6+SV2J7bE= +github.com/ProtonMail/go-crypto v0.0.0-20230923063757-afb1ddc0824c/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= +github.com/acomagu/bufpipe v1.0.4 h1:e3H4WUzM3npvo5uv95QuJM3cQspFNtFBzvJ2oNjKIDQ= +github.com/acomagu/bufpipe v1.0.4/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4= +github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f h1:zvClvFQwU++UpIUBGC8YmDlfhUrweEy1R1Fj1gu5iIM= +github.com/ajg/form v0.0.0-20160822230020-523a5da1a92f/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= +github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= +github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= +github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= +github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= +github.com/aws/aws-sdk-go v1.45.27 h1:b+zOTPkAG4i2RvqPdHxkJZafmhhVaVHBp4r41Tu4I6U= +github.com/aws/aws-sdk-go v1.45.27/go.mod 
h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= +github.com/cloudflare/circl v1.3.5 h1:g+wWynZqVALYAlpSQFAa7TscDnUK8mKYtrxMpw6AUKo= +github.com/cloudflare/circl v1.3.5/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creachadair/jrpc2 v1.1.2 h1:UOYMipEFYlwd5qmcvs9GZBurn3oXt1UDIX5JLjWWFzo= +github.com/creachadair/jrpc2 v1.1.2/go.mod h1:JcCe2Eny3lIvVwZLm92WXyU+tNUgTBWFCLMsfNkjEGk= +github.com/creachadair/mds v0.3.0 h1:uKbCKVtd3iOKVv3uviOm13fFNfe9qoCXJh1Vo7y3Kr0= +github.com/creachadair/mds v0.3.0/go.mod h1:4vrFYUzTXMJpMBU+OA292I6IUxKWCCfZkgXg+/kBZMo= +github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= +github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= +github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a h1:mATvB/9r/3gvcejNsXKSkQ6lcIaNec2nyfOdlTBR2lU= +github.com/elazarl/goproxy v0.0.0-20230808193330-2592e75ae04a/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM= +github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod 
h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/structs v1.0.0 h1:BrX964Rv5uQ3wwS+KRUAJCBBw5PQmgJfJ6v4yly5QwU= +github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= +github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= +github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= +github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/gavv/monotime v0.0.0-20161010190848-47d58efa6955 h1:gmtGRvSexPU4B1T/yYo0sLOKzER1YT+b4kPxPpm0Ty4= +github.com/gavv/monotime v0.0.0-20161010190848-47d58efa6955/go.mod h1:vmp8DIyckQMXOPl0AQVHt+7n5h7Gb7hS6CUydiV8QeA= +github.com/gliderlabs/ssh v0.3.5 h1:OcaySEmAQJgyYcArR+gGGTHCyE7nvhEMTlYY+Dp8CpY= +github.com/gliderlabs/ssh v0.3.5/go.mod h1:8XB4KraRrX39qHhT6yxPsHedjA08I/uBVwj4xC+/+z4= +github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= +github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= +github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= +github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= +github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20230305113008-0c11038e723f h1:Pz0DHeFij3XFhoBRGUDPzSJ+w2UcK5/0JvF8DRI58r8= +github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20230305113008-0c11038e723f/go.mod h1:8LHG1a3SRW71ettAD/jW13h8c6AqjVSeL11RAdgaqpo= +github.com/go-git/go-git/v5 v5.9.0 h1:cD9SFA7sHVRdJ7AYck1ZaAa/yeuBvGPxwXDL8cxrObY= +github.com/go-git/go-git/v5 v5.9.0/go.mod h1:RKIqga24sWdMGZF+1Ekv9kylsDz6LzdTSI2s/OsZWE0= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gorp/gorp/v3 v3.1.0 h1:ItKF/Vbuj31dmV4jxA1qblpSwkl9g1typ24xoe70IGs= +github.com/go-gorp/gorp/v3 v3.1.0/go.mod h1:dLEjIyyRNiXvNZ8PSmzpt1GsWAUK8kjVhEpjH8TixEw= +github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= +github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/gobuffalo/logger v1.0.6 h1:nnZNpxYo0zx+Aj9RfMPBm+x9zAU2OayFh/xrAWi34HU= +github.com/gobuffalo/logger v1.0.6/go.mod 
h1:J31TBEHR1QLV2683OXTAItYIg8pv2JMHnF/quuAbMjs= +github.com/gobuffalo/packd v1.0.2 h1:Yg523YqnOxGIWCp69W12yYBKsoChwI7mtu6ceM9Bwfw= +github.com/gobuffalo/packd v1.0.2/go.mod h1:sUc61tDqGMXON80zpKGp92lDb86Km28jfvX7IAyxFT8= +github.com/gobuffalo/packr/v2 v2.8.3 h1:xE1yzvnO56cUC0sTpKR3DIbxZgB54AftTFMhB2XEWlY= +github.com/gobuffalo/packr/v2 v2.8.3/go.mod h1:0SahksCVcx4IMnigTjiFuyldmTrdTctXsOdiU5KwbKc= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp 
v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-querystring v0.0.0-20160401233042-9235644dd9e5 h1:oERTZ1buOUYlpmKaqlO5fYmz8cZ1rYu5DieJzF4ZVmU= +github.com/google/go-querystring v0.0.0-20160401233042-9235644dd9e5/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/gorilla/schema v1.2.0 h1:YufUaxZYCKGFuAq3c96BOhjgd5nmXiOY9NGzF247Tsc= +github.com/gorilla/schema v1.2.0/go.mod h1:kgLaKoK1FELgZqMAVxx/5cbj0kT+57qxUrAlIO2eleU= +github.com/guregu/null v4.0.0+incompatible h1:4zw0ckM7ECd6FNNddc3Fu4aty9nTlpkkzH7dPn4/4Gw= +github.com/guregu/null 
v4.0.0+incompatible/go.mod h1:ePGpQaN9cw0tj45IR5E5ehMvsFlLlQZAkkOXZurJ3NM= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imkira/go-interpol v1.1.0 h1:KIiKr0VSG2CUW1hl1jpiyuzuJeKUUpC8iM1AIE7N1Vk= +github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jarcoal/httpmock v0.0.0-20161210151336-4442edb3db31 h1:Aw95BEvxJ3K6o9GGv5ppCd1P8hkeIeEJ30FO+OhOJpM= +github.com/jarcoal/httpmock v0.0.0-20161210151336-4442edb3db31/go.mod h1:ks+b9deReOc7jgqp+e7LuFiCBH6Rm5hL32cLcEAArb4= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= +github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= +github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/karrick/godirwalk v1.16.1 h1:DynhcF+bztK8gooS0+NDJFrdNZjJ3gzVzC545UNA9iw= +github.com/karrick/godirwalk v1.16.1/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= +github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= +github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod 
h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw= +github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk= +github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= +github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/manucorporat/sse v0.0.0-20160126180136-ee05b128a739 h1:ykXz+pRRTibcSjG1yRhpdSHInF8yZY/mfn+Rz2Nd1rE= +github.com/manucorporat/sse v0.0.0-20160126180136-ee05b128a739/go.mod h1:zUx1mhth20V3VKgL5jbd1BSQcW4Fy6Qs4PZvQwRFwzM= +github.com/markbates/errx v1.1.0 h1:QDFeR+UP95dO12JgW+tgi2UVfo0V8YBHiUIOaeBPiEI= +github.com/markbates/errx v1.1.0/go.mod h1:PLa46Oex9KNbVDZhKel8v1OT7hD5JZ2eI7AHhA0wswc= +github.com/markbates/oncer v1.0.0 h1:E83IaVAHygyndzPimgUYJjbshhDTALZyXxvk9FOlQRY= +github.com/markbates/oncer v1.0.0/go.mod h1:Z59JA581E9GP6w96jai+TGqafHPW+cPfRxz2aSZ0mcI= +github.com/markbates/safe v1.0.1 h1:yjZkbvRM6IzKj9tlu/zMJLS0n/V351OZWRnF3QfaUxI= +github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= +github.com/matryer/is v1.2.0 h1:92UTHpy8CDwaJ08GqLDzhhuixiBUUD1p3AU6PHddz4A= +github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= +github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM= +github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg= +github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/moul/http2curl v0.0.0-20161031194548-4e24498b31db h1:eZgFHVkk9uOTaOQLC6tgjkzdp7Ays8eEVecBcfHZlJQ= +github.com/moul/http2curl v0.0.0-20161031194548-4e24498b31db/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= +github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= +github.com/pelletier/go-toml/v2 v2.1.0/go.mod 
h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= +github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= +github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/poy/onpar v1.1.2 h1:QaNrNiZx0+Nar5dLgTVp5mXkyoVFIbepjyEoGSnhbAY= +github.com/poy/onpar v1.1.2/go.mod h1:6X8FLNoxyr9kkmnlqpK6LSoiOtrO6MICtWwEuWkLjzg= +github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= +github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= +github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= +github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lneoxM= +github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY= +github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= +github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= +github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= +github.com/rs/cors v1.10.1 h1:L0uuZVXIKlI1SShY2nhFfo44TYvDPQ1w4oFkUJNfhyo= +github.com/rs/cors v1.10.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= +github.com/rubenv/sql-migrate v1.5.2 h1:bMDqOnrJVV/6JQgQ/MxOpU+AdO8uzYYA/TxFUBzFtS0= +github.com/rubenv/sql-migrate v1.5.2/go.mod h1:H38GW8Vqf8F0Su5XignRyaRcbXbJunSWxs+kmzlg0Is= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sagikazarmark/locafero v0.3.0 h1:zT7VEGWC2DTflmccN/5T1etyKvxSxpHsjb9cJvm4SvQ= +github.com/sagikazarmark/locafero v0.3.0/go.mod h1:w+v7UsPNFwzF1cHuOajOOzoq4U7v/ig1mpRjqV+Bu1U= +github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= +github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= +github.com/segmentio/go-loggly v0.5.1-0.20171222203950-eb91657e62b2 h1:S4OC0+OBKz6mJnzuHioeEat74PuQ4Sgvbf8eus695sc= +github.com/segmentio/go-loggly v0.5.1-0.20171222203950-eb91657e62b2/go.mod h1:8zLRYR5npGjaOXgPSKat5+oOh+UHd8OdbS18iqX9F6Y= +github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= +github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod 
h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/skeema/knownhosts v1.2.1 h1:SHWdIUa82uGZz+F+47k8SY4QhhI291cXCpopT1lK2AQ= +github.com/skeema/knownhosts v1.2.1/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= +github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= +github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= +github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY= +github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= +github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= +github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= +github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= +github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI= +github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI= +github.com/stellar/go v0.0.0-20240109175136-3ca501f09055 h1:6/i5f/4CsoArb9eNe+Pr+ATQkBvWNK31at6qaw9zMH4= +github.com/stellar/go v0.0.0-20240109175136-3ca501f09055/go.mod h1:PAWie4LYyDzJXqDVG4Qcj1Nt+uNk7sjzgSCXndQYsBA= +github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2 h1:OzCVd0SV5qE3ZcDeSFCmOWLZfEWZ3Oe8KtmSOYKEVWE= +github.com/stellar/go-xdr v0.0.0-20231122183749-b53fb00bcac2/go.mod h1:yoxyU/M8nl9LKeWIoBrbDPQ7Cy+4jxRcWcOayZ4BMps= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.1 h1:4VhoImhV/Bm0ToFkXFi8hXNXwpDRZ/ynw3amt82mzq0= +github.com/stretchr/objx v0.5.1/go.mod h1:/iHQpkQwBD6DLUmQ4pE+s1TXdob1mORJ4/UFdrifcy0= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= +github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.34.0 h1:d3AAQJ2DRcxJYHm7OXNXtXt2as1vMDfxeIcFvhmGGm4= +github.com/valyala/fasthttp v1.34.0/go.mod h1:epZA5N+7pY6ZaEKRmstzOuYJx9HI8DI1oaCGZpdH4h0= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= 
+github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= +github.com/xdrpp/goxdr v0.1.1 h1:E1B2c6E8eYhOVyd7yEpOyopzTPirUeF6mVOfXfGyJyc= +github.com/xdrpp/goxdr v0.1.1/go.mod h1:dXo1scL/l6s7iME1gxHWo2XCppbHEKZS7m/KyYWkNzA= +github.com/xeipuuv/gojsonpointer v0.0.0-20151027082146-e0fe6f683076 h1:KM4T3G70MiR+JtqplcYkNVoNz7pDwYaBxWBXQK804So= +github.com/xeipuuv/gojsonpointer v0.0.0-20151027082146-e0fe6f683076/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20150808065054-e02fc20de94c h1:XZWnr3bsDQWAZg4Ne+cPoXRPILrNlPNQfxBuwLl43is= +github.com/xeipuuv/gojsonreference v0.0.0-20150808065054-e02fc20de94c/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v0.0.0-20161231055540-f06f290571ce h1:cVSRGH8cOveJNwFEEZLXtB+XMnRqKLjUP6V/ZFYQCXI= +github.com/xeipuuv/gojsonschema v0.0.0-20161231055540-f06f290571ce/go.mod h1:5yf86TLmAcydyeJq5YvxkGPE2fm/u4myDekKRoLuqhs= +github.com/yalp/jsonpath v0.0.0-20150812003900-31a79c7593bb h1:06WAhQa+mYv7BiOk13B/ywyTlkoE/S7uu6TBKU6FHnE= +github.com/yalp/jsonpath v0.0.0-20150812003900-31a79c7593bb/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= +github.com/yudai/gojsondiff v0.0.0-20170107030110-7b1b7adf999d h1:yJIizrfO599ot2kQ6Af1enICnwBD3XoxgX3MrMwot2M= +github.com/yudai/gojsondiff v0.0.0-20170107030110-7b1b7adf999d/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= +github.com/yudai/golcs v0.0.0-20150405163532-d1c525dea8ce h1:888GrqRxabUce7lj4OaoShPxodm3kXOMpSa85wdYzfY= +github.com/yudai/golcs v0.0.0-20150405163532-d1c525dea8ce/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto 
v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= +golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= +golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= +golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod 
h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY= +golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net 
v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= +golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= +golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= +golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync 
v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= +golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= +golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= 
+golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc= +golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod 
h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto 
v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= 
+google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/gavv/httpexpect.v1 v1.0.0-20170111145843-40724cf1e4a0 h1:r5ptJ1tBxVAeqw4CrYWhXIMr0SybY3CDHuIbCg5CFVw= +gopkg.in/gavv/httpexpect.v1 v1.0.0-20170111145843-40724cf1e4a0/go.mod h1:WtiW9ZA1LdaWqtQRo1VbIL/v4XZ8NDta+O/kSpGgVek= +gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= +gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/tylerb/graceful.v1 v1.2.15 h1:1JmOyhKqAyX3BgTXMI84LwT6FOJ4tP2N9e2kwTCM0nQ= +gopkg.in/tylerb/graceful.v1 v1.2.15/go.mod h1:yBhekWvR20ACXVObSSdD3u6S9DeSylanL2PAbAC/uJ8= +gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= 
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.5.0 h1:Ljk6PdHdOhAb5aDMWXjDLMMhph+BpztA4v1QdqEW2eY= +gotest.tools/v3 v3.5.0/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/install_githooks.sh b/install_githooks.sh new file mode 100755 index 00000000..8a93968d --- /dev/null +++ b/install_githooks.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +cd "$( dirname "${BASH_SOURCE[0]}" )" + +cp .cargo-husky/hooks/* .git/hooks/ \ No newline at end of file diff --git a/rust-toolchain.toml b/rust-toolchain.toml new file mode 100644 index 00000000..e340b764 --- /dev/null +++ b/rust-toolchain.toml @@ -0,0 +1,4 @@ +[toolchain] +channel = "stable" +targets = ["wasm32-unknown-unknown"] +components = ["rustc", "cargo", "rustfmt", "clippy", "rust-src"] diff --git a/scripts/check-dependencies.bash b/scripts/check-dependencies.bash new file mode 100755 index 00000000..7415e395 --- /dev/null +++ b/scripts/check-dependencies.bash @@ -0,0 +1,112 @@ +#!/bin/bash + +set -e + +SED=sed +if [ -z "$(sed --version 2>&1 | grep GNU)" ]; then + SED=gsed +fi + +CURL="curl -sL --fail-with-body" + +if ! CARGO_OUTPUT=$(cargo tree -p soroban-env-host 2>&1); then + echo "The project depends on multiple versions of the soroban-env-host Rust library, please unify them." + echo "Make sure the soroban-sdk dependency indirectly points to the same soroban-env-host dependency imported explicitly." 
+  echo
+  echo "This is the soroban-env-host version imported by soroban-sdk:"
+  cargo tree --depth 1 -p soroban-sdk | grep env-host
+  echo
+  echo
+  echo
+  echo "Full error:"
+  echo "$CARGO_OUTPUT"
+  exit 1
+fi
+
+
+# revision of the https://github.com/stellar/rs-stellar-xdr library used by the Rust code
+RS_STELLAR_XDR_REVISION=""
+
+# revision of https://github.com/stellar/stellar-xdr/ used by the Rust code
+STELLAR_XDR_REVISION_FROM_RUST=""
+
+function stellar_xdr_version_from_rust_dep_tree {
+  LINE=$(grep stellar-xdr | head -n 1)
+  # try to obtain a commit
+  COMMIT=$(echo "$LINE" | $SED -n 's/.*rev=\(.*\)#.*/\1/p')
+  if [ -n "$COMMIT" ]; then
+    echo "$COMMIT"
+    return
+  fi
+  # obtain a crate version
+  echo "$LINE" | $SED -n 's/.*stellar-xdr \(v\)\{0,1\}\([^ ]*\).*/\2/p'
+}
+
+if CARGO_OUTPUT=$(cargo tree --depth 0 -p stellar-xdr 2>&1); then
+  RS_STELLAR_XDR_REVISION=$(echo "$CARGO_OUTPUT" | stellar_xdr_version_from_rust_dep_tree)
+  if [ ${#RS_STELLAR_XDR_REVISION} -eq 40 ]; then
+    # revision is a git hash
+    STELLAR_XDR_REVISION_FROM_RUST=$($CURL https://raw.githubusercontent.com/stellar/rs-stellar-xdr/${RS_STELLAR_XDR_REVISION}/xdr/curr-version)
+  else
+    # revision is a crate version
+    CARGO_SRC_BASE_DIR=$(realpath ${CARGO_HOME:-$HOME/.cargo}/registry/src/index*)
+    STELLAR_XDR_REVISION_FROM_RUST=$(cat "${CARGO_SRC_BASE_DIR}/stellar-xdr-${RS_STELLAR_XDR_REVISION}/xdr/curr-version")
+  fi
+else
+  echo "The project depends on multiple versions of the Rust rs-stellar-xdr library"
+  echo "Make sure a single version of stellar-xdr is used"
+  echo
+  echo
+  echo
+  echo "Full error:"
+  echo "$CARGO_OUTPUT"
+fi
+
+# Now, let's compare the Rust and Go XDR revisions
+# TODO: The sed extraction below won't work for version tags
+GO_XDR_REVISION=$(go list -m -f '{{.Version}}' github.com/stellar/go | $SED 's/.*-\(.*\)/\1/')
+
+# revision of https://github.com/stellar/stellar-xdr/ used by the Go code
+STELLAR_XDR_REVISION_FROM_GO=$($CURL https://raw.githubusercontent.com/stellar/go/${GO_XDR_REVISION}/xdr/xdr_commit_generated.txt)
+
+if [ "$STELLAR_XDR_REVISION_FROM_GO" != "$STELLAR_XDR_REVISION_FROM_RUST" ]; then
+  echo "Go and Rust dependencies are using different revisions of https://github.com/stellar/stellar-xdr"
+  echo
+  echo "Rust dependencies are using commit $STELLAR_XDR_REVISION_FROM_RUST"
+  echo "Go dependencies are using commit $STELLAR_XDR_REVISION_FROM_GO"
+  exit 1
+fi
+
+# Now, let's make sure that the Core container and the Captive Core package used in the tests are the same version and that they depend
+# on the same XDR revision
+
+# TODO: The sed extractions below won't work when the commit is not included in the Core image tag/debian packages version
+CORE_CONTAINER_REVISION=$($SED -n 's/.*\/\(stellar-core\|unsafe-stellar-core\(-next\)\{0,1\}\)\:.*\..*-[^\.]*\.\(.*\)\..*/\3/p' < cmd/soroban-rpc/internal/test/docker-compose.yml)
+CAPTIVE_CORE_PKG_REVISION=$($SED -n 's/.*DEBIAN_PKG_VERSION:.*\..*-[^\.]*\.\(.*\)\..*/\1/p' < .github/workflows/soroban-rpc.yml)
+
+if [ "$CORE_CONTAINER_REVISION" != "$CAPTIVE_CORE_PKG_REVISION" ]; then
+  echo "Soroban RPC integration tests are using different versions of the Core container and Captive Core Debian package."
+  echo
+  echo "Core container image commit $CORE_CONTAINER_REVISION"
+  echo "Captive Core Debian package commit $CAPTIVE_CORE_PKG_REVISION"
+  exit 1
+fi
+
+# Revision of https://github.com/stellar/rs-stellar-xdr used by Core.
+# We obtain it from src/rust/src/host-dep-tree-curr.txt, but alternatively (or in addition) we could:
+# * Check the rs-stellar-xdr revision of host-dep-tree-prev.txt
+# * Check the stellar-xdr revision
+CORE_HOST_DEP_TREE_CURR=$($CURL https://raw.githubusercontent.com/stellar/stellar-core/${CORE_CONTAINER_REVISION}/src/rust/src/host-dep-tree-curr.txt)
+
+
+RS_STELLAR_XDR_REVISION_FROM_CORE=$(echo "$CORE_HOST_DEP_TREE_CURR" | stellar_xdr_version_from_rust_dep_tree)
+if [ "$RS_STELLAR_XDR_REVISION" != "$RS_STELLAR_XDR_REVISION_FROM_CORE" ]; then
+  echo "The Core revision used in integration tests (${CORE_CONTAINER_REVISION}) uses a different revision of https://github.com/stellar/rs-stellar-xdr"
+  echo
+  echo "Current repository's revision $RS_STELLAR_XDR_REVISION"
+  echo "Core's revision $RS_STELLAR_XDR_REVISION_FROM_CORE"
+  exit 1
+fi
+
+
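For reference, a rough sketch of what the stellar_xdr_version_from_rust_dep_tree helper above extracts from cargo-tree-style output. The two sample dependency lines are hypothetical, made up purely for illustration; only the sed expressions are taken from the script itself:

# Hypothetical git-pinned dependency line: the first sed captures the 40-character rev,
# which the script then treats as a git hash (it checks the length against 40).
echo 'stellar-xdr v20.0.0 (https://github.com/stellar/rs-stellar-xdr?rev=0123456789abcdef0123456789abcdef01234567#0123456789abcdef)' \
  | sed -n 's/.*rev=\(.*\)#.*/\1/p'
# prints: 0123456789abcdef0123456789abcdef01234567

# Hypothetical crates.io dependency line: the fallback sed captures the crate version instead.
echo 'stellar-xdr v20.0.2' \
  | sed -n 's/.*stellar-xdr \(v\)\{0,1\}\([^ ]*\).*/\2/p'
# prints: 20.0.2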