diff --git a/.clippy.toml b/.clippy.toml index eb66960a..ee57de74 100644 --- a/.clippy.toml +++ b/.clippy.toml @@ -1 +1,12 @@ -msrv = "1.46" +allow-print-in-tests = true +allow-expect-in-tests = true +allow-unwrap-in-tests = true +allow-dbg-in-tests = true +# disallowed-methods = [ +# { path = "std::option::Option::map_or", reason = "prefer `map(..).unwrap_or(..)` for legibility" }, +# { path = "std::option::Option::map_or_else", reason = "prefer `map(..).unwrap_or_else(..)` for legibility" }, +# { path = "std::result::Result::map_or", reason = "prefer `map(..).unwrap_or(..)` for legibility" }, +# { path = "std::result::Result::map_or_else", reason = "prefer `map(..).unwrap_or_else(..)` for legibility" }, +# { path = "std::iter::Iterator::for_each", reason = "prefer `for` for side-effects" }, +# { path = "std::iter::Iterator::try_for_each", reason = "prefer `for` for side-effects" }, +# ] diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index 8ef5278d..00000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,10 +0,0 @@ -version: 2 -updates: -- package-ecosystem: cargo - directory: "/" - schedule: - interval: daily -- package-ecosystem: github-actions - directory: "/" - schedule: - interval: daily diff --git a/.github/renovate.json5 b/.github/renovate.json5 new file mode 100644 index 00000000..7ab13b9f --- /dev/null +++ b/.github/renovate.json5 @@ -0,0 +1,109 @@ +{ + schedule: [ + 'before 5am on the first day of the month', + ], + semanticCommits: 'enabled', + commitMessageLowerCase: 'never', + configMigration: true, + dependencyDashboard: true, + customManagers: [ + { + customType: 'regex', + fileMatch: [ + '^rust-toolchain\\.toml$', + 'Cargo.toml$', + 'clippy.toml$', + '\\.clippy.toml$', + '^\\.github/workflows/ci.yml$', + '^\\.github/workflows/rust-next.yml$', + ], + matchStrings: [ + 'STABLE.*?(?\\d+\\.\\d+(\\.\\d+)?)', + '(?\\d+\\.\\d+(\\.\\d+)?).*?STABLE', + ], + depNameTemplate: 'STABLE', + packageNameTemplate: 
'rust-lang/rust', + datasourceTemplate: 'github-releases', + }, + ], + packageRules: [ + { + commitMessageTopic: 'Rust Stable', + matchManagers: [ + 'custom.regex', + ], + matchDepNames: [ + 'STABLE', + ], + extractVersion: '^(?\\d+\\.\\d+)', // Drop the patch version + schedule: [ + '* * * * *', + ], + automerge: true, + }, + // Goals: + // - Keep version reqs low, ignoring compatible normal/build dependencies + // - Take advantage of latest dev-dependencies + // - Rollup safe upgrades to reduce CI runner load + // - Help keep number of versions down by always using latest breaking change + // - Have lockfile and manifest in-sync + { + matchManagers: [ + 'cargo', + ], + matchDepTypes: [ + 'build-dependencies', + 'dependencies', + ], + matchCurrentVersion: '>=0.1.0', + matchUpdateTypes: [ + 'patch', + ], + enabled: false, + }, + { + matchManagers: [ + 'cargo', + ], + matchDepTypes: [ + 'build-dependencies', + 'dependencies', + ], + matchCurrentVersion: '>=1.0.0', + matchUpdateTypes: [ + 'minor', + 'patch', + ], + enabled: false, + }, + { + matchManagers: [ + 'cargo', + ], + matchDepTypes: [ + 'dev-dependencies', + ], + matchCurrentVersion: '>=0.1.0', + matchUpdateTypes: [ + 'patch', + ], + automerge: true, + groupName: 'compatible (dev)', + }, + { + matchManagers: [ + 'cargo', + ], + matchDepTypes: [ + 'dev-dependencies', + ], + matchCurrentVersion: '>=1.0.0', + matchUpdateTypes: [ + 'minor', + 'patch', + ], + automerge: true, + groupName: 'compatible (dev)', + }, + ], +} diff --git a/.github/settings.yml b/.github/settings.yml new file mode 100644 index 00000000..c2401a07 --- /dev/null +++ b/.github/settings.yml @@ -0,0 +1,59 @@ +# These settings are synced to GitHub by https://probot.github.io/apps/settings/ + +repository: + description: "⚙️ Layered configuration system for Rust applications (with strong support for 12-factor applications). 
" + homepage: "https://docs.rs/config/latest/config/" + topics: "" + has_issues: true + has_projects: false + has_wiki: false + has_downloads: true + default_branch: main + + # Preference: people do clean commits + allow_merge_commit: true + # Backup in case we need to clean up commits + allow_squash_merge: true + # Not really needed + allow_rebase_merge: false + + allow_auto_merge: true + delete_branch_on_merge: true + + squash_merge_commit_title: "PR_TITLE" + squash_merge_commit_message: "PR_BODY" + merge_commit_message: "PR_BODY" + +# labels: +# # Type +# - name: bug +# color: '#b60205' +# description: "Not as expected" +# - name: enhancement +# color: '#1d76db' +# description: "Improve the expected" +# # Flavor +# - name: question +# color: "#cc317c" +# description: "Uncertainty is involved" +# - name: breaking-change +# color: "#e99695" +# - name: good first issue +# color: '#c2e0c6' +# description: "Help wanted!" + +# This serves more as documentation. +# Branch protection API was replaced by rulesets but settings isn't updated. +# See https://github.com/repository-settings/app/issues/825 +# +# branches: +# - name: main +# protection: +# required_pull_request_reviews: null +# required_conversation_resolution: true +# required_status_checks: +# # Required. Require branches to be up to date before merging. 
+# strict: false +# contexts: ["CI", "Spell Check with Typos"] +# enforce_admins: false +# restrictions: null diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml new file mode 100644 index 00000000..a94be159 --- /dev/null +++ b/.github/workflows/audit.yml @@ -0,0 +1,53 @@ +name: Security audit + +permissions: + contents: read + +on: + pull_request: + paths: + - '**/Cargo.toml' + - '**/Cargo.lock' + push: + branches: + - main + +env: + RUST_BACKTRACE: 1 + CARGO_TERM_COLOR: always + CLICOLOR: 1 + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + +jobs: + security_audit: + permissions: + issues: write # to create issues (actions-rs/audit-check) + checks: write # to create check (actions-rs/audit-check) + runs-on: ubuntu-latest + # Prevent sudden announcement of a new advisory from failing ci: + continue-on-error: true + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - uses: actions-rs/audit-check@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + cargo_deny: + permissions: + issues: write # to create issues (actions-rs/audit-check) + checks: write # to create check (actions-rs/audit-check) + runs-on: ubuntu-latest + strategy: + matrix: + checks: + - bans licenses sources + steps: + - uses: actions/checkout@v4 + - uses: EmbarkStudios/cargo-deny-action@v2 + with: + command: check ${{ matrix.checks }} + rust-version: stable diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..3b0b0699 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,183 @@ +name: CI + +permissions: + contents: read + +on: + pull_request: + push: + branches: + - main + +env: + RUST_BACKTRACE: 1 + CARGO_TERM_COLOR: always + CLICOLOR: 1 + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + +jobs: + ci: + permissions: + contents: none + name: CI + needs: [test, msrv, lockfile, docs, rustfmt, clippy, minimal-versions] + runs-on: 
ubuntu-latest + if: "always()" + steps: + - name: Failed + run: exit 1 + if: "contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped')" + test: + name: Test + strategy: + matrix: + os: ["ubuntu-latest", "windows-latest", "macos-latest"] + rust: ["stable"] + continue-on-error: ${{ matrix.rust != 'stable' }} + runs-on: ${{ matrix.os }} + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: ${{ matrix.rust }} + - uses: Swatinem/rust-cache@v2 + - uses: taiki-e/install-action@cargo-hack + - name: Build + run: cargo test --workspace --no-run + - name: Default features + run: cargo test --workspace + - name: All features + run: cargo test --workspace --all-features + - name: No-default features + run: cargo test --workspace --no-default-features + msrv: + name: "Check MSRV" + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + - uses: Swatinem/rust-cache@v2 + - uses: taiki-e/install-action@cargo-hack + - name: Default features + run: cargo hack check --feature-powerset --locked --rust-version --ignore-private --workspace --all-targets + minimal-versions: + name: Minimal versions + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install stable Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + - name: Install nightly Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: nightly + - name: Downgrade dependencies to minimal versions + run: cargo +nightly generate-lockfile -Z minimal-versions + - name: Hack around bad deps + run: cargo update --recursive json5 + - name: Compile with minimal versions + run: cargo +stable check --workspace --all-features --locked + lockfile: + runs-on: ubuntu-latest + steps: + - name: 
Checkout repository + uses: actions/checkout@v4 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + - uses: Swatinem/rust-cache@v2 + - name: "Is lockfile updated?" + run: cargo update --workspace --locked + docs: + name: Docs + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: "1.76" # STABLE + - uses: Swatinem/rust-cache@v2 + - name: Check documentation + env: + RUSTDOCFLAGS: -D warnings + run: cargo doc --workspace --all-features --no-deps --document-private-items + rustfmt: + name: rustfmt + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: "1.76" # STABLE + components: rustfmt + - uses: Swatinem/rust-cache@v2 + - name: Check formatting + run: cargo fmt --all -- --check + clippy: + name: clippy + runs-on: ubuntu-latest + permissions: + security-events: write # to upload sarif results + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: "1.76" # STABLE + components: clippy + - uses: Swatinem/rust-cache@v2 + - name: Install SARIF tools + run: cargo install clippy-sarif --locked + - name: Install SARIF tools + run: cargo install sarif-fmt --locked + - name: Check + run: > + cargo clippy --workspace --all-features --all-targets --message-format=json -- -D warnings --allow deprecated + | clippy-sarif + | tee clippy-results.sarif + | sarif-fmt + continue-on-error: true + - name: Upload + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: clippy-results.sarif + wait-for-processing: true + - name: Report status + run: cargo clippy --workspace --all-features --all-targets -- -D warnings --allow deprecated + coverage: + name: Coverage + runs-on: ubuntu-latest + steps: + - name: Checkout 
repository + uses: actions/checkout@v4 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + - uses: Swatinem/rust-cache@v2 + - name: Install cargo-tarpaulin + run: cargo install cargo-tarpaulin + - name: Gather coverage + run: cargo tarpaulin --output-dir coverage --out lcov + - name: Publish to Coveralls + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/committed.yml b/.github/workflows/committed.yml new file mode 100644 index 00000000..e7a50fbb --- /dev/null +++ b/.github/workflows/committed.yml @@ -0,0 +1,28 @@ +# Not run as part of pre-commit checks because they don't handle sending the correct commit +# range to `committed` +name: Lint Commits +on: [pull_request] + +permissions: + contents: read + +env: + RUST_BACKTRACE: 1 + CARGO_TERM_COLOR: always + CLICOLOR: 1 + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + +jobs: + committed: + name: Lint Commits + runs-on: ubuntu-latest + steps: + - name: Checkout Actions Repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Lint Commits + uses: crate-ci/committed@master diff --git a/.github/workflows/cron.yml b/.github/workflows/cron.yml deleted file mode 100644 index 57e58a20..00000000 --- a/.github/workflows/cron.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: Cron - -on: - schedule: - # every friday at 10:00 am - - cron: '0 10 * * 5' - -jobs: - check: - name: Check - runs-on: ubuntu-latest - strategy: - matrix: - rust: - - 1.70.0 - - stable - - steps: - - name: Checkout sources - uses: actions/checkout@v4.1.1 - - - name: Install toolchain - uses: dtolnay/rust-toolchain@master - with: - toolchain: ${{ matrix.rust }} - - - name: Run cargo check - run: cargo check --all --all-features --examples - - clippy: - needs: [check] - name: Clippy - runs-on: ubuntu-latest - strategy: - matrix: - rust: - - stable - - steps: - - name: Checkout sources - uses: 
actions/checkout@v4.1.1 - - - name: Install toolchain - uses: dtolnay/rust-toolchain@master - with: - toolchain: ${{ matrix.rust }} - components: clippy - - - name: Run cargo check - run: cargo clippy --all --all-features -- -D warnings - diff --git a/.github/workflows/external-types.yml b/.github/workflows/external-types.yml deleted file mode 100644 index 317c7529..00000000 --- a/.github/workflows/external-types.yml +++ /dev/null @@ -1,22 +0,0 @@ -on: [push, pull_request] - -name: Check-external-types - -jobs: - check: - name: Check - runs-on: ubuntu-latest - steps: - - name: Checkout sources - uses: actions/checkout@v4.1.1 - - - name: Install toolchain - uses: dtolnay/rust-toolchain@master - with: - toolchain: nightly-2024-02-07 - - - name: Install cargo-check-external-types - run: cargo install --locked cargo-check-external-types - - - name: Run cargo check-external-types - run: cargo check-external-types --config ./external-types.toml diff --git a/.github/workflows/gitlint.yml b/.github/workflows/gitlint.yml deleted file mode 100644 index 051d9b34..00000000 --- a/.github/workflows/gitlint.yml +++ /dev/null @@ -1,16 +0,0 @@ -on: [pull_request] - -name: Git Checks - -jobs: - commit-lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4.1.1 - with: - fetch-depth: 0 - - uses: actions/setup-python@v5 - with: - python-version: '3.x' - - run: pip install gitlint - - run: gitlint --commits $(git merge-base origin/master HEAD)..HEAD diff --git a/.github/workflows/msrv.yml b/.github/workflows/msrv.yml deleted file mode 100644 index ab61334e..00000000 --- a/.github/workflows/msrv.yml +++ /dev/null @@ -1,79 +0,0 @@ -on: [push, pull_request] - -name: MSRV - -jobs: - check: - name: Check - runs-on: ubuntu-latest - steps: - - name: Checkout sources - uses: actions/checkout@v4.1.1 - - - name: Install toolchain - uses: dtolnay/rust-toolchain@master - with: - toolchain: 1.75.0 - - - name: Cache - uses: Swatinem/rust-cache@v2 - - - name: Run cargo check - run: 
cargo check --all-features --examples --tests - - test: - name: Test Suite - runs-on: ubuntu-latest - steps: - - name: Checkout sources - uses: actions/checkout@v4.1.1 - - - name: Install toolchain - uses: dtolnay/rust-toolchain@master - with: - toolchain: 1.75.0 - - - name: Cache - uses: Swatinem/rust-cache@v2 - - - name: Run cargo test - run: cargo test --all-features - - fmt: - needs: [check] - name: Rustfmt - runs-on: ubuntu-latest - steps: - - name: Checkout sources - uses: actions/checkout@v4.1.1 - - - name: Install toolchain - uses: dtolnay/rust-toolchain@master - with: - toolchain: 1.75.0 - components: rustfmt - - - name: Run cargo fmt - continue-on-error: ${{ matrix.rust == 'beta' }} - run: cargo fmt --all -- --check - - clippy: - needs: [check] - name: Clippy - runs-on: ubuntu-latest - steps: - - name: Checkout sources - uses: actions/checkout@v4.1.1 - - - name: Install toolchain - uses: dtolnay/rust-toolchain@master - with: - toolchain: 1.75.0 - components: clippy - - - name: Cache - uses: Swatinem/rust-cache@v2 - - - name: Run cargo clippy - run: cargo clippy --all-targets --all-features -- -D warnings - diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 00000000..7b55a3d9 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,29 @@ +name: pre-commit + +permissions: {} # none + +on: + pull_request: + push: + branches: [main] + +env: + RUST_BACKTRACE: 1 + CARGO_TERM_COLOR: always + CLICOLOR: 1 + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + +jobs: + pre-commit: + permissions: + contents: read + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.x' + - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/rust-next.yml b/.github/workflows/rust-next.yml new file mode 100644 index 00000000..ad4b4514 --- /dev/null +++ b/.github/workflows/rust-next.yml @@ -0,0 +1,65 
@@ +name: rust-next + +permissions: + contents: read + +on: + schedule: + - cron: '1 1 1 * *' + +env: + RUST_BACKTRACE: 1 + CARGO_TERM_COLOR: always + CLICOLOR: 1 + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + +jobs: + test: + name: Test + strategy: + matrix: + os: ["ubuntu-latest", "windows-latest", "macos-latest"] + rust: ["stable", "beta"] + include: + - os: ubuntu-latest + rust: "nightly" + continue-on-error: ${{ matrix.rust != 'stable' }} + runs-on: ${{ matrix.os }} + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: ${{ matrix.rust }} + - uses: Swatinem/rust-cache@v2 + - uses: taiki-e/install-action@cargo-hack + - name: Build + run: cargo test --workspace --no-run + - name: Default features + run: cargo test --workspace + - name: All features + run: cargo test --workspace --all-features + - name: No-default features + run: cargo test --workspace --no-default-features + latest: + name: "Check latest dependencies" + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Rust + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + - uses: Swatinem/rust-cache@v2 + - uses: taiki-e/install-action@cargo-hack + - name: Update dependencies + run: cargo update + - name: Build + run: cargo test --workspace --no-run + - name: Test + run: cargo hack test --feature-powerset --workspace diff --git a/.github/workflows/spelling.yml b/.github/workflows/spelling.yml new file mode 100644 index 00000000..8e58d9ec --- /dev/null +++ b/.github/workflows/spelling.yml @@ -0,0 +1,25 @@ +name: Spelling + +permissions: + contents: read + +on: [pull_request] + +env: + RUST_BACKTRACE: 1 + CARGO_TERM_COLOR: always + CLICOLOR: 1 + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: true + +jobs: + spelling: + name: Spell Check with Typos + runs-on: 
ubuntu-latest + steps: + - name: Checkout Actions Repository + uses: actions/checkout@v4 + - name: Spell Check Repo + uses: crate-ci/typos@master diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..45a08eca --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,18 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-merge-conflict + stages: [commit] + - id: check-case-conflict + stages: [commit] + - repo: https://github.com/crate-ci/typos + rev: v1.26.0 + hooks: + - id: typos + stages: [commit] + - repo: https://github.com/crate-ci/committed + rev: v1.0.20 + hooks: + - id: committed + stages: [commit-msg] diff --git a/CHANGELOG.md b/CHANGELOG.md index fe41b827..4c1a325d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,8 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). -## Unreleased + +## [Unreleased] - ReleaseDate ## 0.14.0 - 2024-02-01 @@ -27,7 +28,7 @@ also with some fixes and changes that have piled up over a rather long time. - [#334] errors: clarify names of integer types - [#343] fix yaml to parse int keys - [#353] Use TryInto for more permissive deserialization for integers - - [#354] Fix uppercase lowercase isses + - [#354] Fix uppercase lowercase issues - [#374] Fix FIXME in de.rs and value.rs - [#395] Fix: Do not use deprecated function - [#429] Make the parse list key to lowercase when insert the keys @@ -233,7 +234,7 @@ update its MSRV. ## 0.13.0 - 2022-04-03 - - Prefix-Seperator support was added [#292] + - Prefix-Separator support was added [#292] - Environment lists can now be parsed [#255] - Setting an overwrite from an Option was added [#303] - Option to keep the prefix from an environment variable was added [#298] @@ -470,3 +471,6 @@ update its MSRV. 
## 0.2.0 - 2017-01-29 Initial release. + + +[Unreleased]: https://github.com/rust-cli/config-rs/compare/0.14.0...HEAD diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d4f11dfc..0a73111b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,25 +1,84 @@ -Developer's Certificate of Origin 1.1 - -By making a contribution to this project, I certify that: - -(a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or - -(b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or - -(c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. - -(d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. +# Contributing to config-rs + +Thanks for wanting to contribute! There are many ways to contribute and we +appreciate any level you're willing to do. + +## Feature Requests + +Need some new functionality to help? You can let us know by opening an +[issue][new issue]. It's helpful to look through [all issues][all issues] in +case its already being talked about. + +## Bug Reports + +Please let us know about what problems you run into, whether in behavior or +ergonomics of API. You can do this by opening an [issue][new issue]. It's +helpful to look through [all issues][all issues] in case its already being +talked about. 
+
+## Pull Requests
+
+Looking for an idea? Check our [issues][issues]. If it looks more open-ended,
+it is probably best to post on the issue how you are thinking of resolving the
+issue so you can get feedback early in the process. We want you to be
+successful and it can be discouraging to find out a lot of re-work is needed.
+
+Already have an idea? It might be good to first [create an issue][new issue]
+to propose it so we can make sure we are aligned and lower the risk of having
+to re-work some of it and the discouragement that goes along with that.
+
+### Process
+
+As a heads up, we'll be running your PR through the following gauntlet:
+- warnings turned to compile errors
+- `cargo test`
+- `rustfmt`
+- `clippy`
+- `rustdoc`
+- [`committed`](https://github.com/crate-ci/committed) as we use [Conventional](https://www.conventionalcommits.org) commit style
+- [`typos`](https://github.com/crate-ci/typos) to check spelling
+
+Not everything can be checked automatically though.
+
+We request that the commit history gets cleaned up.
+We ask that commits are atomic, meaning they are complete and have a single responsibility.
+PRs should tell a cohesive story, with test and refactor commits that keep the
+fix or feature commits simple and clear.
+
+Specifically, we would encourage
+- File renames be isolated into their own commit
+- Add tests in a commit before their feature or fix, showing the current behavior.
+  The diff for the feature/fix commit will then show how the behavior changed,
+  making it clearer to reviewers and the community and showing people that the
+  test is verifying the expected state.
+  - e.g. [clap#5520](https://github.com/clap-rs/clap/pull/5520)
+
+Note that we are talking about ideals.
+We understand having a clean history requires more advanced git skills;
+feel free to ask us for help!
+We might even suggest where it would work to be lax.
+We also understand that editing some early commits may cause a lot of churn +with merge conflicts which can make it not worth editing all of the history. + +For code organization, we recommend +- Grouping `impl` blocks next to their type (or trait) +- Grouping private items after the `pub` item that uses them. + - The intent is to help people quickly find the "relevant" details, allowing them to "dig deeper" as needed. Or put another way, the `pub` items serve as a table-of-contents. + - The exact order is fuzzy; do what makes sense + +## Releasing + +Pre-requisites +- Running `cargo login` +- A member of `rust-cli:Maintainers` +- Push permission to the repo +- [`cargo-release`](https://github.com/crate-ci/cargo-release/) + +When we're ready to release, a project owner should do the following +1. Update the changelog (see `cargo release changes` for ideas) +2. Determine what the next version is, according to semver +3. Run [`cargo release -x `](https://github.com/crate-ci/cargo-release) + +[issues]: https://github.com/rust-cli/config-rs/issues +[new issue]: https://github.com/rust-cli/config-rs/issues/new +[all issues]: https://github.com/rust-cli/config-rs/issues?utf8=%E2%9C%93&q=is%3Aissue diff --git a/Cargo.toml b/Cargo.toml index b47d3eb5..93f39c7c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,18 +1,114 @@ +[workspace] +resolver = "2" + +[workspace.package] +repository = "https://github.com/rust-cli/config-rs" +license = "MIT OR Apache-2.0" +edition = "2018" +rust-version = "1.75.0" # MSRV +include = [ + "build.rs", + "src/**/*", + "Cargo.toml", + "Cargo.lock", + "LICENSE*", + "README.md", + "benches/**/*", + "examples/**/*" +] + +[workspace.lints.rust] +rust_2018_idioms = { level = "warn", priority = -1 } +unreachable_pub = "warn" +unsafe_op_in_unsafe_fn = "warn" +unused_lifetimes = "warn" +unused_macro_rules = "warn" +unused_qualifications = "warn" + +[workspace.lints.clippy] +bool_assert_comparison = "allow" +branches_sharing_code = "allow" 
+checked_conversions = "warn" +collapsible_else_if = "allow" +create_dir = "warn" +dbg_macro = "warn" +debug_assert_with_mut_call = "warn" +doc_markdown = "warn" +empty_enum = "warn" +enum_glob_use = "warn" +expl_impl_clone_on_copy = "warn" +explicit_deref_methods = "warn" +explicit_into_iter_loop = "warn" +fallible_impl_from = "warn" +filter_map_next = "warn" +flat_map_option = "warn" +float_cmp_const = "warn" +fn_params_excessive_bools = "warn" +from_iter_instead_of_collect = "warn" +if_same_then_else = "allow" +implicit_clone = "warn" +imprecise_flops = "warn" +inconsistent_struct_constructor = "warn" +inefficient_to_string = "warn" +infinite_loop = "warn" +invalid_upcast_comparisons = "warn" +large_digit_groups = "warn" +large_stack_arrays = "warn" +large_types_passed_by_value = "warn" +let_and_return = "allow" # sometimes good to name what you are returning +linkedlist = "warn" +lossy_float_literal = "warn" +macro_use_imports = "warn" +mem_forget = "warn" +mutex_integer = "warn" +needless_continue = "warn" +needless_for_each = "warn" +negative_feature_names = "warn" +path_buf_push_overwrite = "warn" +ptr_as_ptr = "warn" +rc_mutex = "warn" +redundant_feature_names = "warn" +ref_option_ref = "warn" +rest_pat_in_fully_bound_structs = "warn" +same_functions_in_if_condition = "warn" +self_named_module_files = "warn" +semicolon_if_nothing_returned = "warn" +str_to_string = "warn" +string_add = "warn" +string_add_assign = "warn" +string_lit_as_bytes = "warn" +string_to_string = "warn" +todo = "warn" +trait_duplication_in_bounds = "warn" +uninlined_format_args = "warn" +verbose_file_reads = "warn" +wildcard_imports = "warn" +zero_sized_map_values = "warn" + [package] name = "config" version = "0.14.0" description = "Layered configuration system for Rust applications." 
-homepage = "https://github.com/mehcode/config-rs" -repository = "https://github.com/mehcode/config-rs" -readme = "README.md" -keywords = ["config", "configuration", "settings", "env", "environment"] -authors = ["Ryan Leckey "] categories = ["config"] -license = "MIT OR Apache-2.0" -edition = "2018" +keywords = ["config", "configuration", "settings", "env", "environment"] +repository.workspace = true +license.workspace = true +edition.workspace = true +rust-version.workspace = true +include.workspace = true -[badges] -maintenance = { status = "actively-developed" } +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs", "--generate-link-to-definition"] + +[package.metadata.release] +pre-release-replacements = [ + {file="CHANGELOG.md", search="Unreleased", replace="{{version}}", min=1}, + {file="CHANGELOG.md", search="\\.\\.\\.HEAD", replace="...{{tag_name}}", exactly=1}, + {file="CHANGELOG.md", search="ReleaseDate", replace="{{date}}", min=1}, + {file="CHANGELOG.md", search="", replace="\n## [Unreleased] - ReleaseDate\n", exactly=1}, + {file="CHANGELOG.md", search="", replace="\n[Unreleased]: https://github.com/rust-ci/config-rs/compare/{{tag_name}}...HEAD", exactly=1}, +] [features] default = ["toml", "json", "yaml", "ini", "ron", "json5", "convert-case", "async"] @@ -53,3 +149,10 @@ glob = "0.3" notify = "6.0" temp-env = "0.3" log = { version = "0.4", features = ["serde"] } + +[[example]] +name = "async_source" +required-features = ["json", "async"] + +[lints] +workspace = true diff --git a/LICENSE-APACHE b/LICENSE-APACHE index 88c55e3c..8f71f43f 100644 --- a/LICENSE-APACHE +++ b/LICENSE-APACHE @@ -1,201 +1,202 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. 
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the 
following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright 2017 Ryan Leckey - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. \ No newline at end of file + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/LICENSE-MIT b/LICENSE-MIT index e8c44de7..a2d01088 100644 --- a/LICENSE-MIT +++ b/LICENSE-MIT @@ -1,25 +1,19 @@ -Copyright (c) 2017 Ryan Leckey +Copyright (c) Individual contributors -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. \ No newline at end of file +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index 58a59c3f..fc1c9833 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ [JSON]: https://github.com/serde-rs/json [TOML]: https://github.com/toml-lang/toml -[YAML]: https://github.com/Ethiraric/yaml-rust2 +[YAML]: https://github.com/chyh1990/yaml-rust2 [INI]: https://github.com/zonyitoo/rust-ini [RON]: https://github.com/ron-rs/ron [JSON5]: https://github.com/callum-oakley/json5-rs @@ -55,14 +55,11 @@ See [custom_file_format](https://github.com/mehcode/config-rs/tree/master/exampl See the [documentation](https://docs.rs/config) or [examples](https://github.com/mehcode/config-rs/tree/master/examples) for more usage information. - -## MSRV - -We currently support Rust 1.75.0 and newer. - - ## License -config-rs is primarily distributed under the terms of both the MIT license and the Apache License (Version 2.0). 
+Licensed under either of
+
+* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <http://www.apache.org/licenses/LICENSE-2.0>)
+* MIT license ([LICENSE-MIT](LICENSE-MIT) or <http://opensource.org/licenses/MIT>)
-See LICENSE-APACHE and LICENSE-MIT for details.
+at your option.
diff --git a/_typos.toml b/_typos.toml
new file mode 100644
index 00000000..6b7dbca0
--- /dev/null
+++ b/_typos.toml
@@ -0,0 +1,4 @@
+[files]
+extend-exclude = [
+    "/tests/",
+]
diff --git a/committed.toml b/committed.toml
new file mode 100644
index 00000000..4211ae38
--- /dev/null
+++ b/committed.toml
@@ -0,0 +1,3 @@
+style="conventional"
+ignore_author_re="(dependabot|renovate)"
+merge_commit = false
diff --git a/deny.toml b/deny.toml
new file mode 100644
index 00000000..b6ecbe9c
--- /dev/null
+++ b/deny.toml
@@ -0,0 +1,236 @@
+# Note that all fields that take a lint level have these possible values:
+# * deny - An error will be produced and the check will fail
+# * warn - A warning will be produced, but the check will not fail
+# * allow - No warning or error will be produced, though in some cases a note
+# will be
+
+# Root options
+
+# The graph table configures how the dependency graph is constructed and thus
+# which crates the checks are performed against
+[graph]
+# If 1 or more target triples (and optionally, target_features) are specified,
+# only the specified targets will be checked when running `cargo deny check`.
+# This means, if a particular package is only ever used as a target specific
+# dependency, such as, for example, the `nix` crate only being used via the
+# `target_family = "unix"` configuration, that only having windows targets in
+# this list would mean the nix crate, as well as any of its exclusive
+# dependencies not shared by any other crates, would be ignored, as the target
+# list here is effectively saying which targets you are building for.
+targets = [ + # The triple can be any string, but only the target triples built in to + # rustc (as of 1.40) can be checked against actual config expressions + #"x86_64-unknown-linux-musl", + # You can also specify which target_features you promise are enabled for a + # particular target. target_features are currently not validated against + # the actual valid features supported by the target architecture. + #{ triple = "wasm32-unknown-unknown", features = ["atomics"] }, +] +# When creating the dependency graph used as the source of truth when checks are +# executed, this field can be used to prune crates from the graph, removing them +# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate +# is pruned from the graph, all of its dependencies will also be pruned unless +# they are connected to another crate in the graph that hasn't been pruned, +# so it should be used with care. The identifiers are [Package ID Specifications] +# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) +#exclude = [] +# If true, metadata will be collected with `--all-features`. Note that this can't +# be toggled off if true, if you want to conditionally enable `--all-features` it +# is recommended to pass `--all-features` on the cmd line instead +all-features = false +# If true, metadata will be collected with `--no-default-features`. The same +# caveat with `all-features` applies +no-default-features = false +# If set, these feature will be enabled when collecting metadata. If `--features` +# is specified on the cmd line they will take precedence over this option. +#features = [] + +# The output table provides options for how/if diagnostics are outputted +[output] +# When outputting inclusion graphs in diagnostics that include features, this +# option can be used to specify the depth at which feature edges will be added. 
+# This option is included since the graphs can be quite large and the addition +# of features from the crate(s) to all of the graph roots can be far too verbose. +# This option can be overridden via `--feature-depth` on the cmd line +feature-depth = 1 + +# This section is considered when running `cargo deny check advisories` +# More documentation for the advisories section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html +[advisories] +# The path where the advisory databases are cloned/fetched into +#db-path = "$CARGO_HOME/advisory-dbs" +# The url(s) of the advisory databases to use +#db-urls = ["https://github.com/rustsec/advisory-db"] +# A list of advisory IDs to ignore. Note that ignored advisories will still +# output a note when they are encountered. +ignore = [ + #"RUSTSEC-0000-0000", + #{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" }, + #"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish + #{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" }, +] +# If this is true, then cargo deny will use the git executable to fetch advisory database. +# If this is false, then it uses a built-in git library. +# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. +# See Git Authentication for more information about setting up git authentication. +#git-fetch-with-cli = true + +# This section is considered when running `cargo deny check licenses` +# More documentation for the licenses section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html +[licenses] +# List of explicitly allowed licenses +# See https://spdx.org/licenses/ for list of possible licenses +# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. 
+allow = [ + "MIT", + "MIT-0", + "Apache-2.0", + "BSD-3-Clause", + "MPL-2.0", + "Unicode-DFS-2016", + "CC0-1.0", + "ISC", + "OpenSSL", +] +# The confidence threshold for detecting a license from license text. +# The higher the value, the more closely the license text must be to the +# canonical license text of a valid SPDX license file. +# [possible values: any between 0.0 and 1.0]. +confidence-threshold = 0.8 +# Allow 1 or more licenses on a per-crate basis, so that particular licenses +# aren't accepted for every possible crate as with the normal allow list +exceptions = [ + # Each entry is the crate and version constraint, and its specific allow + # list + #{ allow = ["Zlib"], crate = "adler32" }, +] + +# Some crates don't have (easily) machine readable licensing information, +# adding a clarification entry for it allows you to manually specify the +# licensing information +[[licenses.clarify]] +# The package spec the clarification applies to +crate = "ring" +# The SPDX expression for the license requirements of the crate +expression = "MIT AND ISC AND OpenSSL" +# One or more files in the crate's source used as the "source of truth" for +# the license expression. If the contents match, the clarification will be used +# when running the license check, otherwise the clarification will be ignored +# and the crate will be checked normally, which may produce warnings or errors +# depending on the rest of your configuration +license-files = [ +# Each entry is a crate relative path, and the (opaque) hash of its contents +{ path = "LICENSE", hash = 0xbd0eed23 } +] + +[licenses.private] +# If true, ignores workspace crates that aren't published, or are only +# published to private registries. +# To see how to mark a crate as unpublished (to the official registry), +# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. 
+ignore = true +# One or more private registries that you might publish crates to, if a crate +# is only published to private registries, and ignore is true, the crate will +# not have its license(s) checked +registries = [ + #"https://sekretz.com/registry +] + +# This section is considered when running `cargo deny check bans`. +# More documentation about the 'bans' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html +[bans] +# Lint level for when multiple versions of the same crate are detected +multiple-versions = "warn" +# Lint level for when a crate version requirement is `*` +wildcards = "allow" +# The graph highlighting used when creating dotgraphs for crates +# with multiple versions +# * lowest-version - The path to the lowest versioned duplicate is highlighted +# * simplest-path - The path to the version with the fewest edges is highlighted +# * all - Both lowest-version and simplest-path are used +highlight = "all" +# The default lint level for `default` features for crates that are members of +# the workspace that is being checked. This can be overridden by allowing/denying +# `default` on a crate-by-crate basis if desired. +workspace-default-features = "allow" +# The default lint level for `default` features for external crates that are not +# members of the workspace. This can be overridden by allowing/denying `default` +# on a crate-by-crate basis if desired. +external-default-features = "allow" +# List of crates that are allowed. Use with care! 
+allow = [ + #"ansi_term@0.11.0", + #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" }, +] +# List of crates to deny +deny = [ + #"ansi_term@0.11.0", + #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" }, + # Wrapper crates can optionally be specified to allow the crate when it + # is a direct dependency of the otherwise banned crate + #{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] }, +] + +# List of features to allow/deny +# Each entry the name of a crate and a version range. If version is +# not specified, all versions will be matched. +#[[bans.features]] +#crate = "reqwest" +# Features to not allow +#deny = ["json"] +# Features to allow +#allow = [ +# "rustls", +# "__rustls", +# "__tls", +# "hyper-rustls", +# "rustls", +# "rustls-pemfile", +# "rustls-tls-webpki-roots", +# "tokio-rustls", +# "webpki-roots", +#] +# If true, the allowed features must exactly match the enabled feature set. If +# this is set there is no point setting `deny` +#exact = true + +# Certain crates/versions that will be skipped when doing duplicate detection. +skip = [ + #"ansi_term@0.11.0", + #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, +] +# Similarly to `skip` allows you to skip certain crates during duplicate +# detection. Unlike skip, it also includes the entire tree of transitive +# dependencies starting at the specified crate, up to a certain depth, which is +# by default infinite. +skip-tree = [ + #"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies + #{ crate = "ansi_term@0.11.0", depth = 20 }, +] + +# This section is considered when running `cargo deny check sources`. 
+# More documentation about the 'sources' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html +[sources] +# Lint level for what to happen when a crate from a crate registry that is not +# in the allow list is encountered +unknown-registry = "deny" +# Lint level for what to happen when a crate from a git repository that is not +# in the allow list is encountered +unknown-git = "deny" +# List of URLs for allowed crate registries. Defaults to the crates.io index +# if not specified. If it is specified but empty, no registries are allowed. +allow-registry = ["https://github.com/rust-lang/crates.io-index"] +# List of URLs for allowed Git repositories +allow-git = [] + +[sources.allow-org] +# 1 or more github.com organizations to allow git sources for +github = [] +# 1 or more gitlab.com organizations to allow git sources for +gitlab = [] +# 1 or more bitbucket.org organizations to allow git sources for +bitbucket = [] diff --git a/examples/custom_file_format/main.rs b/examples/custom_file_format/main.rs index cc919ca8..2fd47b80 100644 --- a/examples/custom_file_format/main.rs +++ b/examples/custom_file_format/main.rs @@ -2,7 +2,7 @@ use config::{Config, File, FileStoredFormat, Format, Map, Value, ValueKind}; use std::io::{Error, ErrorKind}; /// The private and public key sources will be read into their associated variable: -#[derive(serde::Deserialize, Clone, Debug)] +#[derive(serde_derive::Deserialize, Clone, Debug)] pub struct Settings { pub private_key: Option, pub public_key: Option, @@ -23,7 +23,7 @@ fn main() { // Deserialize the config object into your Settings struct: let settings: Settings = settings.try_deserialize().unwrap(); - println!("{:#?}", settings); + println!("{settings:#?}"); } #[derive(Debug, Clone)] @@ -34,7 +34,7 @@ impl Format for PemFile { &self, uri: Option<&String>, text: &str, - ) -> Result, Box> { + ) -> Result, Box> { // Store any valid keys into this map, they'll be merged with other sources 
into the final config map: let mut result = Map::new(); diff --git a/examples/custom_str_format/main.rs b/examples/custom_str_format/main.rs index 4da2e9dc..e78f2a9e 100644 --- a/examples/custom_str_format/main.rs +++ b/examples/custom_str_format/main.rs @@ -7,8 +7,8 @@ fn main() { .build(); match config { - Ok(cfg) => println!("A config: {:#?}", cfg), - Err(e) => println!("An error: {}", e), + Ok(cfg) => println!("A config: {cfg:#?}"), + Err(e) => println!("An error: {e}"), } } @@ -20,7 +20,7 @@ impl Format for MyFormat { &self, uri: Option<&String>, text: &str, - ) -> Result, Box> { + ) -> Result, Box> { // Let's assume our format is somewhat malformed, but this is fine // In real life anything can be used here - nom, serde or other. // @@ -29,11 +29,11 @@ impl Format for MyFormat { if text == "good" { result.insert( - "key".to_string(), + "key".to_owned(), Value::new(uri, ValueKind::String(text.into())), ); } else { - println!("Something went wrong in {:?}", uri); + println!("Something went wrong in {uri:?}"); } Ok(result) diff --git a/examples/hierarchical-env/main.rs b/examples/hierarchical-env/main.rs index ee1b69bd..08409494 100644 --- a/examples/hierarchical-env/main.rs +++ b/examples/hierarchical-env/main.rs @@ -6,5 +6,5 @@ fn main() { let settings = Settings::new(); // Print out our settings - println!("{:?}", settings); + println!("{settings:?}"); } diff --git a/examples/hierarchical-env/settings.rs b/examples/hierarchical-env/settings.rs index 65b5f875..3960a9a5 100644 --- a/examples/hierarchical-env/settings.rs +++ b/examples/hierarchical-env/settings.rs @@ -34,7 +34,7 @@ struct Braintree { #[derive(Debug, Deserialize)] #[allow(unused)] -pub struct Settings { +pub(crate) struct Settings { debug: bool, database: Database, sparkpost: Sparkpost, @@ -43,7 +43,7 @@ pub struct Settings { } impl Settings { - pub fn new() -> Result { + pub(crate) fn new() -> Result { let run_mode = env::var("RUN_MODE").unwrap_or_else(|_| "development".into()); let s = 
Config::builder() @@ -53,7 +53,7 @@ impl Settings { // Default to 'development' env // Note that this file is _optional_ .add_source( - File::with_name(&format!("examples/hierarchical-env/config/{}", run_mode)) + File::with_name(&format!("examples/hierarchical-env/config/{run_mode}")) .required(false), ) // Add in a local configuration file diff --git a/examples/watch/main.rs b/examples/watch/main.rs index ca83572c..7f35e869 100644 --- a/examples/watch/main.rs +++ b/examples/watch/main.rs @@ -28,7 +28,7 @@ fn show() { ); } -fn watch() { +fn watch() -> ! { // Create a channel to receive the events. let (tx, rx) = channel(); @@ -62,7 +62,7 @@ fn watch() { show(); } - Err(e) => println!("watch error: {:?}", e), + Err(e) => println!("watch error: {e:?}"), _ => { // Ignore event diff --git a/external-types.toml b/external-types.toml deleted file mode 100644 index d2802ad2..00000000 --- a/external-types.toml +++ /dev/null @@ -1,10 +0,0 @@ -allowed_external_types = [ - "convert_case::case::Case", - "nom::error::ErrorKind", - "serde::de::Deserialize", - "serde::de::Deserializer", - "serde::de::Error", - "serde::ser::Error", - "serde::ser::Serialize", -] - diff --git a/release.toml b/release.toml new file mode 100644 index 00000000..e70fb753 --- /dev/null +++ b/release.toml @@ -0,0 +1,3 @@ +owners = ["github:rust-cli:Maintainers"] +dependent-version = "fix" +allow-branch = ["main"] diff --git a/src/builder.rs b/src/builder.rs index 45ebb744..465f3ae9 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -38,6 +38,8 @@ use crate::{config::Config, path::Expression, source::Source, value::Value}; /// # use config::*; /// # use std::error::Error; /// # fn main() -> Result<(), Box> { +/// # #[cfg(feature = "json")] +/// # { /// let mut builder = Config::builder() /// .set_default("default", "1")? 
/// .add_source(File::new("config/settings", FileFormat::Json)) @@ -52,6 +54,7 @@ use crate::{config::Config, path::Expression, source::Source, value::Value}; /// // something went wrong /// } /// } +/// # } /// # Ok(()) /// # } /// ``` @@ -64,11 +67,14 @@ use crate::{config::Config, path::Expression, source::Source, value::Value}; /// # use std::error::Error; /// # use config::*; /// # fn main() -> Result<(), Box> { +/// # #[cfg(feature = "json")] +/// # { /// let mut builder = Config::builder(); /// builder = builder.set_default("default", "1")?; /// builder = builder.add_source(File::new("config/settings", FileFormat::Json)); /// builder = builder.add_source(File::new("config/settings.prod", FileFormat::Json)); /// builder = builder.set_override("override", "1")?; +/// # } /// # Ok(()) /// # } /// ``` @@ -97,7 +103,7 @@ pub struct ConfigBuilder { /// Represents [`ConfigBuilder`] state. pub trait BuilderState {} -/// Represents data specific to builder in default, sychronous state, without support for async. +/// Represents data specific to builder in default, synchronous state, without support for async. #[derive(Debug, Default, Clone)] pub struct DefaultState { sources: Vec>, @@ -118,7 +124,7 @@ pub struct DefaultState { #[derive(Debug, Clone, Default)] pub struct AsyncConfigBuilder {} -/// Represents data specific to builder in asychronous state, with support for async. +/// Represents data specific to builder in asynchronous state, with support for async. 
#[derive(Debug, Default, Clone)] pub struct AsyncState { sources: Vec, diff --git a/src/de.rs b/src/de.rs index 0e2b8de6..1a19730c 100644 --- a/src/de.rs +++ b/src/de.rs @@ -269,7 +269,7 @@ struct EnumAccess { } impl EnumAccess { - fn variant_deserializer(&self, name: &str) -> Result { + fn variant_deserializer(&self, name: &str) -> Result> { self.variants .iter() .find(|&&s| s.to_lowercase() == name.to_lowercase()) // changing to lowercase will enable deserialization of lowercase values to enums @@ -277,7 +277,7 @@ impl EnumAccess { .ok_or_else(|| self.no_constructor_error(name)) } - fn table_deserializer(&self, table: &Table) -> Result { + fn table_deserializer(&self, table: &Table) -> Result> { if table.len() == 1 { self.variant_deserializer(table.iter().next().unwrap().0) } else { diff --git a/src/env.rs b/src/env.rs index d817d3a7..9848d69a 100644 --- a/src/env.rs +++ b/src/env.rs @@ -34,12 +34,12 @@ pub struct Environment { /// Optional directive to translate collected keys into a form that matches what serializers /// that the configuration would expect. For example if you have the `kebab-case` attribute - /// for your serde config types, you may want to pass Case::Kebab here. + /// for your serde config types, you may want to pass `Case::Kebab` here. #[cfg(feature = "convert-case")] - convert_case: Option, + convert_case: Option, - /// Optional character sequence that separates each env value into a vector. only works when try_parsing is set to true - /// Once set, you cannot have type String on the same environment, unless you set list_parse_keys. + /// Optional character sequence that separates each env value into a vector. only works when `try_parsing` is set to true + /// Once set, you cannot have type String on the same environment, unless you set `list_parse_keys`. list_separator: Option, /// A list of keys which should always be parsed as a list. If not set you can have only `Vec` or `String` (not both) in one environment. 
list_parse_keys: Option>, @@ -110,7 +110,7 @@ impl Environment { } } - /// See [Environment::with_prefix] + /// See [`Environment::with_prefix`] pub fn prefix(mut self, s: &str) -> Self { self.prefix = Some(s.into()); self @@ -141,7 +141,7 @@ impl Environment { self } - /// When set and try_parsing is true, then all environment variables will be parsed as [`Vec`] instead of [`String`]. + /// When set and `try_parsing` is true, then all environment variables will be parsed as [`Vec`] instead of [`String`]. /// See /// [`with_list_parse_key`](Self::with_list_parse_key) /// when you want to use [`Vec`] in combination with [`String`]. @@ -151,11 +151,11 @@ impl Environment { } /// Add a key which should be parsed as a list when collecting [`Value`]s from the environment. - /// Once list_separator is set, the type for string is [`Vec`]. + /// Once `list_separator` is set, the type for string is [`Vec`]. /// To switch the default type back to type Strings you need to provide the keys which should be [`Vec`] using this function. 
pub fn with_list_parse_key(mut self, key: &str) -> Self { if self.list_parse_keys.is_none() { - self.list_parse_keys = Some(vec![key.to_lowercase()]) + self.list_parse_keys = Some(vec![key.to_lowercase()]); } else { self.list_parse_keys = self.list_parse_keys.map(|mut keys| { keys.push(key.to_lowercase()); @@ -246,7 +246,7 @@ impl Source for Environment { let prefix_pattern = self .prefix .as_ref() - .map(|prefix| format!("{}{}", prefix, prefix_separator).to_lowercase()); + .map(|prefix| format!("{prefix}{prefix_separator}").to_lowercase()); let collector = |(key, value): (String, String)| { // Treat empty environment variables as unset @@ -295,7 +295,7 @@ impl Source for Environment { if keys.contains(&key) { let v: Vec = value .split(separator) - .map(|s| Value::new(Some(&uri), ValueKind::String(s.to_string()))) + .map(|s| Value::new(Some(&uri), ValueKind::String(s.to_owned()))) .collect(); ValueKind::Array(v) } else { @@ -304,7 +304,7 @@ impl Source for Environment { } else { let v: Vec = value .split(separator) - .map(|s| Value::new(Some(&uri), ValueKind::String(s.to_string()))) + .map(|s| Value::new(Some(&uri), ValueKind::String(s.to_owned()))) .collect(); ValueKind::Array(v) } diff --git a/src/error.rs b/src/error.rs index 3cb50e03..8f3363b8 100644 --- a/src/error.rs +++ b/src/error.rs @@ -20,15 +20,15 @@ pub enum Unexpected { } impl fmt::Display for Unexpected { - fn fmt(&self, f: &mut fmt::Formatter) -> result::Result<(), fmt::Error> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> result::Result<(), fmt::Error> { match *self { - Unexpected::Bool(b) => write!(f, "boolean `{}`", b), - Unexpected::I64(i) => write!(f, "64-bit integer `{}`", i), - Unexpected::I128(i) => write!(f, "128-bit integer `{}`", i), - Unexpected::U64(i) => write!(f, "64-bit unsigned integer `{}`", i), - Unexpected::U128(i) => write!(f, "128-bit unsigned integer `{}`", i), - Unexpected::Float(v) => write!(f, "floating point `{}`", v), - Unexpected::Str(ref s) => write!(f, "string {:?}", 
s), + Unexpected::Bool(b) => write!(f, "boolean `{b}`"), + Unexpected::I64(i) => write!(f, "64-bit integer `{i}`"), + Unexpected::I128(i) => write!(f, "128-bit integer `{i}`"), + Unexpected::U64(i) => write!(f, "64-bit unsigned integer `{i}`"), + Unexpected::U128(i) => write!(f, "128-bit unsigned integer `{i}`"), + Unexpected::Float(v) => write!(f, "floating point `{v}`"), + Unexpected::Str(ref s) => write!(f, "string {s:?}"), Unexpected::Unit => write!(f, "unit value"), Unexpected::Seq => write!(f, "sequence"), Unexpected::Map => write!(f, "map"), @@ -142,7 +142,7 @@ impl ConfigError { } else { "" }; - format!("{}{}{}", segment, dot, key) + format!("{segment}{dot}{key}") }; match self { Self::Type { @@ -168,33 +168,33 @@ impl ConfigError { #[must_use] pub(crate) fn prepend_index(self, idx: usize) -> Self { - self.prepend(&format!("[{}]", idx), false) + self.prepend(&format!("[{idx}]"), false) } } /// Alias for a `Result` with the error type set to `ConfigError`. -pub type Result = result::Result; +pub(crate) type Result = result::Result; // Forward Debug to Display for readable panic! 
messages impl fmt::Debug for ConfigError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", *self) } } impl fmt::Display for ConfigError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { ConfigError::Frozen => write!(f, "configuration is frozen"), ConfigError::PathParse(ref kind) => write!(f, "{}", kind.description()), - ConfigError::Message(ref s) => write!(f, "{}", s), + ConfigError::Message(ref s) => write!(f, "{s}"), - ConfigError::Foreign(ref cause) => write!(f, "{}", cause), + ConfigError::Foreign(ref cause) => write!(f, "{cause}"), ConfigError::NotFound(ref key) => { - write!(f, "configuration property {:?} not found", key) + write!(f, "configuration property {key:?} not found") } ConfigError::Type { @@ -203,24 +203,24 @@ impl fmt::Display for ConfigError { expected, ref key, } => { - write!(f, "invalid type: {}, expected {}", unexpected, expected)?; + write!(f, "invalid type: {unexpected}, expected {expected}")?; if let Some(ref key) = *key { - write!(f, " for key `{}`", key)?; + write!(f, " for key `{key}`")?; } if let Some(ref origin) = *origin { - write!(f, " in {}", origin)?; + write!(f, " in {origin}")?; } Ok(()) } ConfigError::FileParse { ref cause, ref uri } => { - write!(f, "{}", cause)?; + write!(f, "{cause}")?; if let Some(ref uri) = *uri { - write!(f, " in {}", uri)?; + write!(f, " in {uri}")?; } Ok(()) diff --git a/src/file/format/ini.rs b/src/file/format/ini.rs index 9295e60e..7394d6dd 100644 --- a/src/file/format/ini.rs +++ b/src/file/format/ini.rs @@ -5,7 +5,7 @@ use ini::Ini; use crate::map::Map; use crate::value::{Value, ValueKind}; -pub fn parse( +pub(crate) fn parse( uri: Option<&String>, text: &str, ) -> Result, Box> { diff --git a/src/file/format/json.rs b/src/file/format/json.rs index bd506f0d..9100b662 100644 --- a/src/file/format/json.rs +++ b/src/file/format/json.rs 
@@ -4,7 +4,7 @@ use crate::format; use crate::map::Map; use crate::value::{Value, ValueKind}; -pub fn parse( +pub(crate) fn parse( uri: Option<&String>, text: &str, ) -> Result, Box> { diff --git a/src/file/format/json5.rs b/src/file/format/json5.rs index 99003bd0..c0e557fc 100644 --- a/src/file/format/json5.rs +++ b/src/file/format/json5.rs @@ -6,7 +6,7 @@ use crate::value::{Value, ValueKind}; #[derive(serde::Deserialize, Debug)] #[serde(untagged)] -pub enum Val { +pub(crate) enum Val { Null, Boolean(bool), Integer(i64), @@ -16,7 +16,7 @@ pub enum Val { Object(Map), } -pub fn parse( +pub(crate) fn parse( uri: Option<&String>, text: &str, ) -> Result, Box> { diff --git a/src/file/format/mod.rs b/src/file/format/mod.rs index 76489d4f..1242d153 100644 --- a/src/file/format/mod.rs +++ b/src/file/format/mod.rs @@ -29,22 +29,22 @@ mod json5; /// File formats provided by the library. /// -/// Although it is possible to define custom formats using [`Format`] trait it is recommended to use FileFormat if possible. +/// Although it is possible to define custom formats using [`Format`] trait it is recommended to use `FileFormat` if possible. 
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub enum FileFormat { /// TOML (parsed with toml) #[cfg(feature = "toml")] Toml, - /// JSON (parsed with serde_json) + /// JSON (parsed with `serde_json`) #[cfg(feature = "json")] Json, - /// YAML (parsed with yaml_rust2) + /// YAML (parsed with `yaml_rust2`) #[cfg(feature = "yaml")] Yaml, - /// INI (parsed with rust_ini) + /// INI (parsed with `rust_ini`) #[cfg(feature = "ini")] Ini, diff --git a/src/file/format/ron.rs b/src/file/format/ron.rs index 9ac81a9d..2911a73c 100644 --- a/src/file/format/ron.rs +++ b/src/file/format/ron.rs @@ -4,7 +4,7 @@ use crate::format; use crate::map::Map; use crate::value::{Value, ValueKind}; -pub fn parse( +pub(crate) fn parse( uri: Option<&String>, text: &str, ) -> Result, Box> { diff --git a/src/file/format/toml.rs b/src/file/format/toml.rs index 19b78044..26130779 100644 --- a/src/file/format/toml.rs +++ b/src/file/format/toml.rs @@ -4,7 +4,7 @@ use crate::format; use crate::map::Map; use crate::value::Value; -pub fn parse( +pub(crate) fn parse( uri: Option<&String>, text: &str, ) -> Result, Box> { diff --git a/src/file/format/yaml.rs b/src/file/format/yaml.rs index 63189c7a..cc162ba2 100644 --- a/src/file/format/yaml.rs +++ b/src/file/format/yaml.rs @@ -8,7 +8,7 @@ use crate::format; use crate::map::Map; use crate::value::{Value, ValueKind}; -pub fn parse( +pub(crate) fn parse( uri: Option<&String>, text: &str, ) -> Result, Box> { @@ -78,7 +78,7 @@ fn from_yaml_value( struct MultipleDocumentsError(usize); impl fmt::Display for MultipleDocumentsError { - fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, format: &mut fmt::Formatter<'_>) -> fmt::Result { write!(format, "Got {} YAML documents, expected 1", self.0) } } @@ -93,7 +93,7 @@ impl Error for MultipleDocumentsError { struct FloatParsingError(String); impl fmt::Display for FloatParsingError { - fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, format: &mut 
fmt::Formatter<'_>) -> fmt::Result { write!(format, "Parsing {} as floating point number failed", self.0) } } diff --git a/src/file/mod.rs b/src/file/mod.rs index 0443a04b..7fc4874e 100644 --- a/src/file/mod.rs +++ b/src/file/mod.rs @@ -1,5 +1,5 @@ mod format; -pub mod source; +pub(crate) mod source; use std::fmt::Debug; use std::path::{Path, PathBuf}; @@ -31,7 +31,7 @@ pub struct File { required: bool, } -/// An extension of [`Format`](crate::Format) trait. +/// An extension of [`Format`] trait. /// /// Associates format with file extensions, therefore linking storage-agnostic notion of format to a file system. pub trait FileStoredFormat: Format { @@ -39,7 +39,7 @@ pub trait FileStoredFormat: Format { fn file_extensions(&self) -> &'static [&'static str]; } -impl File +impl File where F: FileStoredFormat + 'static, { @@ -52,7 +52,7 @@ where } } -impl File +impl File where F: FileStoredFormat + 'static, { @@ -60,39 +60,39 @@ where Self { format: Some(format), required: true, - source: source::file::FileSourceFile::new(name.into()), + source: FileSourceFile::new(name.into()), } } } -impl File { +impl File { /// Given the basename of a file, will attempt to locate a file by setting its /// extension to a registered format. 
pub fn with_name(name: &str) -> Self { Self { format: None, required: true, - source: source::file::FileSourceFile::new(name.into()), + source: FileSourceFile::new(name.into()), } } } -impl<'a> From<&'a Path> for File { +impl<'a> From<&'a Path> for File { fn from(path: &'a Path) -> Self { Self { format: None, required: true, - source: source::file::FileSourceFile::new(path.to_path_buf()), + source: FileSourceFile::new(path.to_path_buf()), } } } -impl From for File { +impl From for File { fn from(path: PathBuf) -> Self { Self { format: None, required: true, - source: source::file::FileSourceFile::new(path), + source: FileSourceFile::new(path), } } } diff --git a/src/file/source/mod.rs b/src/file/source/mod.rs index 3c3d10ca..d017a765 100644 --- a/src/file/source/mod.rs +++ b/src/file/source/mod.rs @@ -1,5 +1,5 @@ -pub mod file; -pub mod string; +pub(crate) mod file; +pub(crate) mod string; use std::error::Error; use std::fmt::Debug; diff --git a/src/format.rs b/src/format.rs index 3d1ca335..8b4016d7 100644 --- a/src/format.rs +++ b/src/format.rs @@ -25,7 +25,7 @@ pub trait Format { } // Have a proper error fire if the root of a file is ever not a Table -pub fn extract_root_table( +pub(crate) fn extract_root_table( uri: Option<&String>, value: Value, ) -> Result, Box> { diff --git a/src/lib.rs b/src/lib.rs index b618e75b..fdbe0b3f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -17,8 +17,10 @@ //! //! See the [examples](https://github.com/mehcode/config-rs/tree/master/examples) for //! general usage information. 
-#![allow(unknown_lints)] -// #![warn(missing_docs)] + +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![warn(clippy::print_stderr)] +#![warn(clippy::print_stdout)] pub mod builder; mod config; diff --git a/src/path/mod.rs b/src/path/mod.rs index 7a0903a4..730d0edf 100644 --- a/src/path/mod.rs +++ b/src/path/mod.rs @@ -7,7 +7,7 @@ use crate::value::{Value, ValueKind}; mod parser; #[derive(Debug, Eq, PartialEq, Clone, Hash)] -pub enum Expression { +pub(crate) enum Expression { Identifier(String), Child(Box, String), Subscript(Box, isize), @@ -25,12 +25,12 @@ fn sindex_to_uindex(index: isize, len: usize) -> usize { if index >= 0 { index as usize } else { - len - (index.abs() as usize) + len - index.unsigned_abs() } } impl Expression { - pub fn get(self, root: &Value) -> Option<&Value> { + pub(crate) fn get(self, root: &Value) -> Option<&Value> { match self { Self::Identifier(id) => { match root.kind { @@ -78,7 +78,7 @@ impl Expression { } } - pub fn get_mut<'a>(&self, root: &'a mut Value) -> Option<&'a mut Value> { + pub(crate) fn get_mut<'a>(&self, root: &'a mut Value) -> Option<&'a mut Value> { match *self { Self::Identifier(ref id) => match root.kind { ValueKind::Table(ref mut map) => map.get_mut(id), @@ -116,7 +116,7 @@ impl Expression { } } - pub fn get_mut_forcibly<'a>(&self, root: &'a mut Value) -> Option<&'a mut Value> { + pub(crate) fn get_mut_forcibly<'a>(&self, root: &'a mut Value) -> Option<&'a mut Value> { match *self { Self::Identifier(ref id) => match root.kind { ValueKind::Table(ref mut map) => Some( @@ -177,7 +177,7 @@ impl Expression { } } - pub fn set(&self, root: &mut Value, value: Value) { + pub(crate) fn set(&self, root: &mut Value, value: Value) { match *self { Self::Identifier(ref id) => { // Ensure that root is a table @@ -231,7 +231,7 @@ impl Expression { Self::Subscript(ref expr, index) => { if let Some(parent) = expr.get_mut_forcibly(root) { if !matches!(parent.kind, ValueKind::Array(_)) { - *parent = Vec::::new().into() + *parent = 
Vec::::new().into(); } if let ValueKind::Array(ref mut array) = parent.kind { diff --git a/src/path/parser.rs b/src/path/parser.rs index 8378121b..fbb8ba24 100644 --- a/src/path/parser.rs +++ b/src/path/parser.rs @@ -48,7 +48,7 @@ fn postfix<'a>(expr: Expression) -> impl FnMut(&'a str) -> IResult<&'a str, Expr alt((child, subscript)) } -pub fn from_str(input: &str) -> Result { +pub(crate) fn from_str(input: &str) -> Result { match ident(input) { Ok((mut rem, mut expr)) => { while !rem.is_empty() { @@ -73,7 +73,7 @@ pub fn from_str(input: &str) -> Result { } } -pub fn to_error_kind(e: Err>) -> ErrorKind { +pub(crate) fn to_error_kind(e: Err>) -> ErrorKind { match e { Err::Incomplete(_) => ErrorKind::Complete, Err::Failure(e) | Err::Error(e) => e.code, diff --git a/src/ser.rs b/src/ser.rs index 1c160986..bb5af9e3 100644 --- a/src/ser.rs +++ b/src/ser.rs @@ -20,7 +20,7 @@ enum SerKey { } /// An uninhabited type: no values like this can ever exist! -pub enum Unreachable {} +pub(crate) enum Unreachable {} /// Serializer for numbered sequences /// @@ -55,17 +55,13 @@ impl ConfigSerializer { let mut whole = match keys.next() { Some(SerKey::Named(s)) => s.clone(), - _ => { - return Err(ConfigError::Message( - "top level is not a struct".to_string(), - )) - } + _ => return Err(ConfigError::Message("top level is not a struct".to_owned())), }; for k in keys { match k { - SerKey::Named(s) => write!(whole, ".{}", s), - SerKey::Seq(i) => write!(whole, "[{}]", i), + SerKey::Named(s) => write!(whole, ".{s}"), + SerKey::Seq(i) => write!(whole, "[{i}]"), } .expect("write! 
to a string failed"); } @@ -74,7 +70,7 @@ impl ConfigSerializer { } fn push_key(&mut self, key: &str) { - self.keys.push(SerKey::Named(key.to_string())); + self.keys.push(SerKey::Named(key.to_owned())); } fn pop_key(&mut self) { @@ -126,11 +122,11 @@ impl<'a> ser::Serializer for &'a mut ConfigSerializer { } fn serialize_u64(self, v: u64) -> Result { - if v > (i64::max_value() as u64) { + if v > (i64::MAX as u64) { Err(ConfigError::Message(format!( "value {} is greater than the max {}", v, - i64::max_value() + i64::MAX ))) } else { self.serialize_i64(v as i64) @@ -150,7 +146,7 @@ impl<'a> ser::Serializer for &'a mut ConfigSerializer { } fn serialize_str(self, v: &str) -> Result { - self.serialize_primitive(v.to_string()) + self.serialize_primitive(v.to_owned()) } fn serialize_bytes(self, v: &[u8]) -> Result { @@ -288,7 +284,7 @@ impl<'a> ser::SerializeSeq for SeqSerializer<'a> { Some(SerKey::Seq(i)) => *i += 1, _ => { return Err(ConfigError::Message( - "config-rs internal error (ser._element but last not Seq!".to_string(), + "config-rs internal error (ser._element but last not Seq!".to_owned(), )) } }; @@ -418,11 +414,12 @@ impl<'a> ser::SerializeStructVariant for &'a mut ConfigSerializer { } } -pub struct StringKeySerializer; +pub(crate) struct StringKeySerializer; /// Define `$emthod`, `serialize_foo`, taking `$type` and serialising it via [`Display`] macro_rules! 
string_serialize_via_display { { $method:ident, $type:ty } => { fn $method(self, v: $type) -> Result { + #[allow(clippy::str_to_string)] Ok(v.to_string()) } } } @@ -481,7 +478,7 @@ impl ser::Serializer for StringKeySerializer { _variant_index: u32, variant: &str, ) -> Result { - Ok(variant.to_string()) + Ok(variant.to_owned()) } fn serialize_newtype_struct(self, _name: &str, value: &T) -> Result @@ -506,20 +503,19 @@ impl ser::Serializer for StringKeySerializer { fn serialize_seq(self, _len: Option) -> Result { Err(ConfigError::Message( - "seq can't serialize to string key".to_string(), + "seq can't serialize to string key".to_owned(), )) } fn serialize_tuple(self, _len: usize) -> Result { Err(ConfigError::Message( - "tuple can't serialize to string key".to_string(), + "tuple can't serialize to string key".to_owned(), )) } fn serialize_tuple_struct(self, name: &str, _len: usize) -> Result { Err(ConfigError::Message(format!( - "tuple struct {} can't serialize to string key", - name + "tuple struct {name} can't serialize to string key" ))) } @@ -531,21 +527,19 @@ impl ser::Serializer for StringKeySerializer { _len: usize, ) -> Result { Err(ConfigError::Message(format!( - "tuple variant {}::{} can't serialize to string key", - name, variant + "tuple variant {name}::{variant} can't serialize to string key" ))) } fn serialize_map(self, _len: Option) -> Result { Err(ConfigError::Message( - "map can't serialize to string key".to_string(), + "map can't serialize to string key".to_owned(), )) } fn serialize_struct(self, name: &str, _len: usize) -> Result { Err(ConfigError::Message(format!( - "struct {} can't serialize to string key", - name + "struct {name} can't serialize to string key" ))) } @@ -557,8 +551,7 @@ impl ser::Serializer for StringKeySerializer { _len: usize, ) -> Result { Err(ConfigError::Message(format!( - "struct variant {}::{} can't serialize to string key", - name, variant + "struct variant {name}::{variant} can't serialize to string key" ))) } } @@ -686,6 
+679,8 @@ impl ser::SerializeStructVariant for Unreachable { mod test { use super::*; use serde::{Deserialize, Serialize}; + #[cfg(not(feature = "json5"))] + use serde_derive::{Deserialize, Serialize}; #[test] fn test_struct() { @@ -697,7 +692,7 @@ mod test { let test = Test { int: 1, - seq: vec!["a".to_string(), "b".to_string()], + seq: vec!["a".to_owned(), "b".to_owned()], }; let config = Config::try_from(&test).unwrap(); @@ -706,6 +701,7 @@ mod test { } #[test] + #[cfg(feature = "json")] fn test_nest() { let val = serde_json::json! { { "top": { diff --git a/src/source.rs b/src/source.rs index 16e19385..b68e3950 100644 --- a/src/source.rs +++ b/src/source.rs @@ -32,8 +32,8 @@ fn set_value(cache: &mut Value, key: &str, value: &Value) { // Set using the path Ok(expr) => expr.set(cache, value.clone()), - // Set diretly anyway - _ => path::Expression::Identifier(key.to_string()).set(cache, value.clone()), + // Set directly anyway + _ => path::Expression::Identifier(key.to_owned()).set(cache, value.clone()), } } diff --git a/src/value.rs b/src/value.rs index fe00e7e9..c242c628 100644 --- a/src/value.rs +++ b/src/value.rs @@ -26,8 +26,8 @@ pub enum ValueKind { Array(Array), } -pub type Array = Vec; -pub type Table = Map; +pub(crate) type Array = Vec; +pub(crate) type Table = Map; impl Default for ValueKind { fn default() -> Self { @@ -151,29 +151,29 @@ where } impl Display for ValueKind { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use std::fmt::Write; match *self { - Self::String(ref value) => write!(f, "{}", value), - Self::Boolean(value) => write!(f, "{}", value), - Self::I64(value) => write!(f, "{}", value), - Self::I128(value) => write!(f, "{}", value), - Self::U64(value) => write!(f, "{}", value), - Self::U128(value) => write!(f, "{}", value), - Self::Float(value) => write!(f, "{}", value), + Self::String(ref value) => write!(f, "{value}"), + Self::Boolean(value) => write!(f, "{value}"), + 
Self::I64(value) => write!(f, "{value}"), + Self::I128(value) => write!(f, "{value}"), + Self::U64(value) => write!(f, "{value}"), + Self::U128(value) => write!(f, "{value}"), + Self::Float(value) => write!(f, "{value}"), Self::Nil => write!(f, "nil"), Self::Table(ref table) => { let mut s = String::new(); for (k, v) in table.iter() { - write!(s, "{} => {}, ", k, v)? + write!(s, "{k} => {v}, ")?; } write!(f, "{{ {s} }}") } Self::Array(ref array) => { let mut s = String::new(); for e in array.iter() { - write!(s, "{}, ", e)?; + write!(s, "{e}, ")?; } write!(f, "{s:?}") } @@ -193,7 +193,7 @@ pub struct Value { /// /// A Value originating from the environment would contain: /// ```text - /// the envrionment + /// the environment /// ``` /// /// A Value originating from a remote source might contain: @@ -718,7 +718,7 @@ impl<'de> Deserialize<'de> for Value { impl<'de> Visitor<'de> for ValueVisitor { type Value = Value; - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("any valid configuration value") } @@ -786,7 +786,7 @@ impl<'de> Deserialize<'de> for Value { let num: i128 = value.try_into().map_err(|_| { E::invalid_type( ::serde::de::Unexpected::Other( - format!("integer `{}` as u128", value).as_str(), + format!("integer `{value}` as u128").as_str(), ), &self, ) @@ -875,7 +875,7 @@ where } impl Display for Value { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.kind) } } @@ -888,6 +888,7 @@ mod tests { use crate::FileFormat; #[test] + #[cfg(feature = "toml")] fn test_i64() { let c = Config::builder() .add_source(File::new("tests/types/i64.toml", FileFormat::Toml)) diff --git a/tests/async_builder.rs b/tests/async_builder.rs index b91a1a3b..985b0ec7 100644 --- a/tests/async_builder.rs +++ b/tests/async_builder.rs @@ -1,3 +1,7 @@ +#![cfg(feature = "async")] 
+#![cfg(feature = "json")] +#![cfg(feature = "toml")] + use async_trait::async_trait; use config::{AsyncSource, Config, ConfigError, FileFormat, Format, Map, Value}; use std::{env, fs, path, str::FromStr}; @@ -11,7 +15,7 @@ struct AsyncFile { /// This is a test only implementation to be used in tests impl AsyncFile { - pub fn new(path: String, format: FileFormat) -> Self { + pub(crate) fn new(path: String, format: FileFormat) -> Self { Self { path, format } } } diff --git a/tests/env.rs b/tests/env.rs index 12a68a92..4befa5bb 100644 --- a/tests/env.rs +++ b/tests/env.rs @@ -10,7 +10,7 @@ fn test_default() { let environment = Environment::default(); assert!(environment.collect().unwrap().contains_key("a_b_c")); - }) + }); } #[test] @@ -19,7 +19,7 @@ fn test_prefix_is_removed_from_key() { let environment = Environment::with_prefix("B"); assert!(environment.collect().unwrap().contains_key("a_c")); - }) + }); } #[test] @@ -49,7 +49,7 @@ fn test_separator_behavior() { let environment = Environment::with_prefix("C").separator("_"); assert!(environment.collect().unwrap().contains_key("b.a")); - }) + }); } #[test] @@ -58,7 +58,7 @@ fn test_empty_value_is_ignored() { let environment = Environment::default().ignore_empty(true); assert!(!environment.collect().unwrap().contains_key("c_a_b")); - }) + }); } #[test] @@ -77,7 +77,7 @@ fn test_keep_prefix() { let environment = Environment::with_prefix("C").keep_prefix(true); assert!(environment.collect().unwrap().contains_key("c_a_b")); - }) + }); } #[test] @@ -86,7 +86,7 @@ fn test_custom_separator_behavior() { let environment = Environment::with_prefix("C").separator("."); assert!(environment.collect().unwrap().contains_key("b.a")); - }) + }); } #[test] @@ -97,7 +97,7 @@ fn test_custom_prefix_separator_behavior() { .prefix_separator("-"); assert!(environment.collect().unwrap().contains_key("b.a")); - }) + }); } #[test] @@ -127,7 +127,7 @@ fn test_parse_int() { let config: TestIntEnum = config.try_deserialize().unwrap(); 
assert!(matches!(config, TestIntEnum::Int(TestInt { int_val: 42 }))); - }) + }); } #[test] @@ -160,7 +160,7 @@ fn test_parse_uint() { config, TestUintEnum::Uint(TestUint { int_val: 42 }) )); - }) + }); } #[test] @@ -192,10 +192,10 @@ fn test_parse_float() { // can't use `matches!` because of float value match config { TestFloatEnum::Float(TestFloat { float_val }) => { - assert!(float_cmp::approx_eq!(f64, float_val, 42.3)) + assert!(float_cmp::approx_eq!(f64, float_val, 42.3)); } } - }) + }); } #[test] @@ -228,7 +228,7 @@ fn test_parse_bool() { config, TestBoolEnum::Bool(TestBool { bool_val: true }) )); - }) + }); } #[test] @@ -238,6 +238,7 @@ fn test_parse_off_int() { #[derive(Deserialize, Debug)] #[serde(tag = "tag")] enum TestIntEnum { + #[allow(dead_code)] Int(TestInt), } @@ -258,7 +259,7 @@ fn test_parse_off_int() { .unwrap(); config.try_deserialize::().unwrap(); - }) + }); } #[test] @@ -268,6 +269,7 @@ fn test_parse_off_float() { #[derive(Deserialize, Debug)] #[serde(tag = "tag")] enum TestFloatEnum { + #[allow(dead_code)] Float(TestFloat), } @@ -288,7 +290,7 @@ fn test_parse_off_float() { .unwrap(); config.try_deserialize::().unwrap(); - }) + }); } #[test] @@ -298,6 +300,7 @@ fn test_parse_off_bool() { #[derive(Deserialize, Debug)] #[serde(tag = "tag")] enum TestBoolEnum { + #[allow(dead_code)] Bool(TestBool), } @@ -318,7 +321,7 @@ fn test_parse_off_bool() { .unwrap(); config.try_deserialize::().unwrap(); - }) + }); } #[test] @@ -328,6 +331,7 @@ fn test_parse_int_fail() { #[derive(Deserialize, Debug)] #[serde(tag = "tag")] enum TestIntEnum { + #[allow(dead_code)] Int(TestInt), } @@ -348,7 +352,7 @@ fn test_parse_int_fail() { .unwrap(); config.try_deserialize::().unwrap(); - }) + }); } #[test] @@ -358,6 +362,7 @@ fn test_parse_float_fail() { #[derive(Deserialize, Debug)] #[serde(tag = "tag")] enum TestFloatEnum { + #[allow(dead_code)] Float(TestFloat), } @@ -378,7 +383,7 @@ fn test_parse_float_fail() { .unwrap(); config.try_deserialize::().unwrap(); - }) + }); 
} #[test] @@ -388,6 +393,7 @@ fn test_parse_bool_fail() { #[derive(Deserialize, Debug)] #[serde(tag = "tag")] enum TestBoolEnum { + #[allow(dead_code)] Bool(TestBool), } @@ -408,7 +414,7 @@ fn test_parse_bool_fail() { .unwrap(); config.try_deserialize::().unwrap(); - }) + }); } #[test] @@ -460,7 +466,7 @@ fn test_parse_string_and_list() { } } }, - ) + ); } #[test] @@ -512,10 +518,11 @@ fn test_parse_string_and_list_ignore_list_parse_key_case() { } } }, - ) + ); } #[test] +#[cfg(feature = "convert-case")] fn test_parse_nested_kebab() { use config::Case; @@ -558,7 +565,7 @@ fn test_parse_nested_kebab() { let config = Config::builder().add_source(environment).build().unwrap(); - println!("{:#?}", config); + println!("{config:#?}"); let config: TestConfig = config.try_deserialize().unwrap(); @@ -567,7 +574,7 @@ fn test_parse_nested_kebab() { assert_eq!(config.value_with_multipart_name, "value1"); assert_eq!(config.inner_config.another_multipart_name, "value2"); }, - ) + ); } #[test] @@ -600,10 +607,10 @@ fn test_parse_string() { match config { TestStringEnum::String(TestString { string_val }) => { - assert_eq!(test_string, string_val) + assert_eq!(test_string, string_val); } } - }) + }); } #[test] @@ -636,10 +643,10 @@ fn test_parse_string_list() { match config { TestListEnum::StringList(TestList { string_list }) => { - assert_eq!(test_string, string_list) + assert_eq!(test_string, string_list); } } - }) + }); } #[test] @@ -675,7 +682,7 @@ fn test_parse_off_string() { assert_eq!(test_string, string_val_1); } } - }) + }); } #[test] diff --git a/tests/errors.rs b/tests/errors.rs index 1c3204c2..47110a51 100644 --- a/tests/errors.rs +++ b/tests/errors.rs @@ -45,6 +45,7 @@ fn test_error_type() { } #[test] +#[cfg(unix)] fn test_error_deser_whole() { #[derive(Deserialize, Debug)] struct Place { @@ -76,7 +77,7 @@ fn test_error_type_detached() { assert!(res.is_err()); assert_eq!( res.unwrap_err().to_string(), - "invalid type: string \"fals\", expected a boolean".to_string() + 
"invalid type: string \"fals\", expected a boolean".to_owned() ); } @@ -148,7 +149,7 @@ fn test_error_enum_de() { let on_d = on_v.try_deserialize::(); assert_eq!( on_d.unwrap_err().to_string(), - "enum Diode does not have variant constructor on".to_string() + "enum Diode does not have variant constructor on".to_owned() ); let array_v: Value = vec![100, 100].into(); @@ -159,8 +160,8 @@ fn test_error_enum_de() { ); let confused_v: Value = [ - ("Brightness".to_string(), 100.into()), - ("Blinking".to_string(), vec![300, 700].into()), + ("Brightness".to_owned(), 100.into()), + ("Blinking".to_owned(), vec![300, 700].into()), ] .iter() .cloned() @@ -218,7 +219,7 @@ fn test_error_root_not_table() { Err(e) => match e { ConfigError::FileParse { cause, .. } => assert_eq!( "invalid type: boolean `false`, expected a map", - format!("{}", cause) + format!("{cause}") ), _ => panic!("Wrong error: {:?}", e), }, diff --git a/tests/file.rs b/tests/file.rs index 9e4469ae..f9fb07f0 100644 --- a/tests/file.rs +++ b/tests/file.rs @@ -20,11 +20,12 @@ fn test_file_required_not_found() { assert!(res.is_err()); assert_eq!( res.unwrap_err().to_string(), - "configuration file \"tests/NoSettings\" not found".to_string() + "configuration file \"tests/NoSettings\" not found".to_owned() ); } #[test] +#[cfg(feature = "toml")] fn test_file_auto() { let c = Config::builder() .add_source(File::with_name("tests/Settings-production")) @@ -44,11 +45,12 @@ fn test_file_auto_not_found() { assert!(res.is_err()); assert_eq!( res.unwrap_err().to_string(), - "configuration file \"tests/NoSettings\" not found".to_string() + "configuration file \"tests/NoSettings\" not found".to_owned() ); } #[test] +#[cfg(feature = "json")] fn test_file_ext() { let c = Config::builder() .add_source(File::with_name("tests/Settings.json")) @@ -58,7 +60,9 @@ fn test_file_ext() { assert_eq!(c.get("debug").ok(), Some(true)); assert_eq!(c.get("production").ok(), Some(false)); } + #[test] +#[cfg(feature = "ini")] fn 
test_file_second_ext() { let c = Config::builder() .add_source(File::with_name("tests/Settings2.default")) diff --git a/tests/file_ini.rs b/tests/file_ini.rs index 7561bff5..dcc5e89a 100644 --- a/tests/file_ini.rs +++ b/tests/file_ini.rs @@ -101,14 +101,14 @@ fn test_override_uppercase_value_for_struct() { assert_ne!(v.FOO, "FOO should be overridden"); assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } Err(e) => { if e.to_string().contains("missing field `FOO`") { assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } else { panic!("{}", e); @@ -130,9 +130,9 @@ fn test_override_lowercase_value_for_struct() { let values: StructSettings = cfg.try_deserialize().unwrap(); assert_eq!( values.foo, - "I have been overridden_with_lower_case".to_string() + "I have been overridden_with_lower_case".to_owned() ); - assert_ne!(values.foo, "I am bar".to_string()); + assert_ne!(values.foo, "I am bar".to_owned()); } #[test] @@ -148,7 +148,7 @@ fn test_override_uppercase_value_for_enums() { assert_eq!( val, - EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string()) + EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned()) ); } @@ -166,6 +166,6 @@ fn test_override_lowercase_value_for_enums() { assert_eq!( param, - EnumSettings::Bar("I have been overridden_with_lower_case".to_string()) + EnumSettings::Bar("I have been overridden_with_lower_case".to_owned()) ); } diff --git a/tests/file_json.rs b/tests/file_json.rs index 91cbc9bc..27ab61fe 100644 --- a/tests/file_json.rs +++ b/tests/file_json.rs @@ -43,7 +43,7 @@ fn test_file() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.place.name, "Torre di Pisa"); 
assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); @@ -52,15 +52,15 @@ fn test_file() { assert_eq!(s.place.rating, Some(4.5)); assert_eq!(s.place.telephone, None); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); if cfg!(feature = "preserve_order") { assert_eq!( s.place .creator .into_iter() - .collect::>(), + .collect::>(), vec![ - ("name".to_string(), "John Smith".into()), + ("name".to_owned(), "John Smith".into()), ("username".into(), "jsmith".into()), ("email".into(), "jsmith@localhost".into()), ] @@ -68,7 +68,7 @@ fn test_file() { } else { assert_eq!( s.place.creator["name"].clone().into_string().unwrap(), - "John Smith".to_string() + "John Smith".to_owned() ); } } @@ -147,15 +147,15 @@ fn test_override_uppercase_value_for_struct() { assert_ne!(v.FOO, "FOO should be overridden"); assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } Err(e) => { if e.to_string().contains("missing field `FOO`") { - println!("triggered error {:?}", e); + println!("triggered error {e:?}"); assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } else { panic!("{}", e); @@ -177,9 +177,9 @@ fn test_override_lowercase_value_for_struct() { let values: StructSettings = cfg.try_deserialize().unwrap(); assert_eq!( values.foo, - "I have been overridden_with_lower_case".to_string() + "I have been overridden_with_lower_case".to_owned() ); - assert_ne!(values.foo, "I am bar".to_string()); + assert_ne!(values.foo, "I am bar".to_owned()); } #[test] @@ -195,7 +195,7 @@ fn test_override_uppercase_value_for_enums() { assert_eq!( val, - EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string()) + EnumSettings::Bar("I HAVE BEEN 
OVERRIDDEN_WITH_UPPER_CASE".to_owned()) ); } @@ -213,6 +213,6 @@ fn test_override_lowercase_value_for_enums() { assert_eq!( param, - EnumSettings::Bar("I have been overridden_with_lower_case".to_string()) + EnumSettings::Bar("I have been overridden_with_lower_case".to_owned()) ); } diff --git a/tests/file_json5.rs b/tests/file_json5.rs index bdeb3eaf..f0c12676 100644 --- a/tests/file_json5.rs +++ b/tests/file_json5.rs @@ -42,7 +42,7 @@ fn test_file() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.place.name, "Torre di Pisa"); assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); @@ -51,15 +51,15 @@ fn test_file() { assert_eq!(s.place.rating, Some(4.5)); assert_eq!(s.place.telephone, None); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); if cfg!(feature = "preserve_order") { assert_eq!( s.place .creator .into_iter() - .collect::>(), + .collect::>(), vec![ - ("name".to_string(), "John Smith".into()), + ("name".to_owned(), "John Smith".into()), ("username".into(), "jsmith".into()), ("email".into(), "jsmith@localhost".into()), ] @@ -67,7 +67,7 @@ fn test_file() { } else { assert_eq!( s.place.creator["name"].clone().into_string().unwrap(), - "John Smith".to_string() + "John Smith".to_owned() ); } } @@ -125,14 +125,14 @@ fn test_override_uppercase_value_for_struct() { assert_ne!(v.FOO, "FOO should be overridden"); assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } Err(e) => { if e.to_string().contains("missing field `FOO`") { assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } 
else { panic!("{}", e); @@ -154,9 +154,9 @@ fn test_override_lowercase_value_for_struct() { let values: StructSettings = cfg.try_deserialize().unwrap(); assert_eq!( values.foo, - "I have been overridden_with_lower_case".to_string() + "I have been overridden_with_lower_case".to_owned() ); - assert_ne!(values.foo, "I am bar".to_string()); + assert_ne!(values.foo, "I am bar".to_owned()); } #[test] @@ -172,7 +172,7 @@ fn test_override_uppercase_value_for_enums() { assert_eq!( val, - EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string()) + EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned()) ); } @@ -190,6 +190,6 @@ fn test_override_lowercase_value_for_enums() { assert_eq!( param, - EnumSettings::Bar("I have been overridden_with_lower_case".to_string()) + EnumSettings::Bar("I have been overridden_with_lower_case".to_owned()) ); } diff --git a/tests/file_ron.rs b/tests/file_ron.rs index 9f1ea356..ae3ff973 100644 --- a/tests/file_ron.rs +++ b/tests/file_ron.rs @@ -44,7 +44,7 @@ fn test_file() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.place.initials, ('T', 'P')); assert_eq!(s.place.name, "Torre di Pisa"); assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); @@ -54,15 +54,15 @@ fn test_file() { assert_eq!(s.place.rating, Some(4.5)); assert_eq!(s.place.telephone, None); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); if cfg!(feature = "preserve_order") { assert_eq!( s.place .creator .into_iter() - .collect::>(), + .collect::>(), vec![ - ("name".to_string(), "John Smith".into()), + ("name".to_owned(), "John Smith".into()), ("username".into(), "jsmith".into()), ("email".into(), "jsmith@localhost".into()), ] @@ -70,7 +70,7 @@ fn test_file() { } else { assert_eq!( 
s.place.creator["name"].clone().into_string().unwrap(), - "John Smith".to_string() + "John Smith".to_owned() ); } } @@ -125,14 +125,14 @@ fn test_override_uppercase_value_for_struct() { assert_ne!(v.FOO, "FOO should be overridden"); assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } Err(e) => { if e.to_string().contains("missing field `FOO`") { assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } else { panic!("{}", e); @@ -154,9 +154,9 @@ fn test_override_lowercase_value_for_struct() { let values: StructSettings = cfg.try_deserialize().unwrap(); assert_eq!( values.foo, - "I have been overridden_with_lower_case".to_string() + "I have been overridden_with_lower_case".to_owned() ); - assert_ne!(values.foo, "I am bar".to_string()); + assert_ne!(values.foo, "I am bar".to_owned()); } #[test] @@ -172,7 +172,7 @@ fn test_override_uppercase_value_for_enums() { assert_eq!( val, - EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string()) + EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned()) ); } @@ -190,6 +190,6 @@ fn test_override_lowercase_value_for_enums() { assert_eq!( param, - EnumSettings::Bar("I have been overridden_with_lower_case".to_string()) + EnumSettings::Bar("I have been overridden_with_lower_case".to_owned()) ); } diff --git a/tests/file_toml.rs b/tests/file_toml.rs index e0ec626e..2e909e4f 100644 --- a/tests/file_toml.rs +++ b/tests/file_toml.rs @@ -50,7 +50,7 @@ fn test_file() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.code, AsciiCode(53)); assert_eq!(s.place.number, PlaceNumber(1)); assert_eq!(s.place.name, "Torre di Pisa"); @@ -61,15 +61,15 @@ fn test_file() { 
assert_eq!(s.place.rating, Some(4.5)); assert_eq!(s.place.telephone, None); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); if cfg!(feature = "preserve_order") { assert_eq!( s.place .creator .into_iter() - .collect::>(), + .collect::>(), vec![ - ("name".to_string(), "John Smith".into()), + ("name".to_owned(), "John Smith".into()), ("username".into(), "jsmith".into()), ("email".into(), "jsmith@localhost".into()), ] @@ -77,7 +77,7 @@ fn test_file() { } else { assert_eq!( s.place.creator["name"].clone().into_string().unwrap(), - "John Smith".to_string() + "John Smith".to_owned() ); } } @@ -129,14 +129,14 @@ fn test_override_uppercase_value_for_struct() { assert_ne!(v.FOO, "FOO should be overridden"); assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } Err(e) => { if e.to_string().contains("missing field `FOO`") { assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } else { panic!("{}", e); @@ -158,9 +158,9 @@ fn test_override_lowercase_value_for_struct() { let values: StructSettings = cfg.try_deserialize().unwrap(); assert_eq!( values.bar, - "I have been overridden_with_lower_case".to_string() + "I have been overridden_with_lower_case".to_owned() ); - assert_ne!(values.bar, "I am bar".to_string()); + assert_ne!(values.bar, "I am bar".to_owned()); } #[test] @@ -177,7 +177,7 @@ fn test_override_uppercase_value_for_enums() { assert_eq!( values, - EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string()) + EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned()) ); } @@ -195,6 +195,6 @@ fn test_override_lowercase_value_for_enums() { assert_eq!( values, - EnumSettings::Bar("I have been overridden_with_lower_case".to_string()) + EnumSettings::Bar("I have been 
overridden_with_lower_case".to_owned()) ); } diff --git a/tests/file_yaml.rs b/tests/file_yaml.rs index 80cf461c..751dda68 100644 --- a/tests/file_yaml.rs +++ b/tests/file_yaml.rs @@ -43,7 +43,7 @@ fn test_file() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.place.name, "Torre di Pisa"); assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); @@ -52,15 +52,15 @@ fn test_file() { assert_eq!(s.place.rating, Some(4.5)); assert_eq!(s.place.telephone, None); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); if cfg!(feature = "preserve_order") { assert_eq!( s.place .creator .into_iter() - .collect::>(), + .collect::>(), vec![ - ("name".to_string(), "John Smith".into()), + ("name".to_owned(), "John Smith".into()), ("username".into(), "jsmith".into()), ("email".into(), "jsmith@localhost".into()), ] @@ -68,12 +68,13 @@ fn test_file() { } else { assert_eq!( s.place.creator["name"].clone().into_string().unwrap(), - "John Smith".to_string() + "John Smith".to_owned() ); } } #[test] +#[cfg(unix)] fn test_error_parse() { let res = Config::builder() .add_source(File::new("tests/Settings-invalid", FileFormat::Yaml)) @@ -155,15 +156,15 @@ fn test_override_uppercase_value_for_struct() { assert_ne!(v.FOO, "FOO should be overridden"); assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } Err(e) => { if e.to_string().contains("missing field `FOO`") { - println!("triggered error {:?}", e); + println!("triggered error {e:?}"); assert_eq!( lower_settings.foo, - "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string() + "I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned() ); } else { panic!("{}", e); @@ 
-185,9 +186,9 @@ fn test_override_lowercase_value_for_struct() { let values: StructSettings = cfg.try_deserialize().unwrap(); assert_eq!( values.bar, - "I have been overridden_with_lower_case".to_string() + "I have been overridden_with_lower_case".to_owned() ); - assert_ne!(values.bar, "I am bar".to_string()); + assert_ne!(values.bar, "I am bar".to_owned()); } #[test] @@ -203,7 +204,7 @@ fn test_override_uppercase_value_for_enums() { assert_eq!( values, - EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_string()) + EnumSettings::Bar("I HAVE BEEN OVERRIDDEN_WITH_UPPER_CASE".to_owned()) ); } @@ -221,6 +222,6 @@ fn test_override_lowercase_value_for_enums() { assert_eq!( values, - EnumSettings::Bar("I have been overridden_with_lower_case".to_string()) + EnumSettings::Bar("I have been overridden_with_lower_case".to_owned()) ); } diff --git a/tests/get.rs b/tests/get.rs index fc532073..83bc76bc 100644 --- a/tests/get.rs +++ b/tests/get.rs @@ -40,7 +40,7 @@ fn test_not_found() { assert!(res.is_err()); assert_eq!( res.unwrap_err().to_string(), - "configuration property \"not_found\" not found".to_string() + "configuration property \"not_found\" not found".to_owned() ); } @@ -57,22 +57,22 @@ fn test_scalar_type_loose() { let c = make(); assert_eq!(c.get("debug").ok(), Some(true)); - assert_eq!(c.get("debug").ok(), Some("true".to_string())); + assert_eq!(c.get("debug").ok(), Some("true".to_owned())); assert_eq!(c.get("debug").ok(), Some(1)); assert_eq!(c.get("debug").ok(), Some(1.0)); assert_eq!(c.get("debug_s").ok(), Some(true)); - assert_eq!(c.get("debug_s").ok(), Some("true".to_string())); + assert_eq!(c.get("debug_s").ok(), Some("true".to_owned())); assert_eq!(c.get("debug_s").ok(), Some(1)); assert_eq!(c.get("debug_s").ok(), Some(1.0)); assert_eq!(c.get("production").ok(), Some(false)); - assert_eq!(c.get("production").ok(), Some("false".to_string())); + assert_eq!(c.get("production").ok(), Some("false".to_owned())); assert_eq!(c.get("production").ok(), 
Some(0)); assert_eq!(c.get("production").ok(), Some(0.0)); assert_eq!(c.get("production_s").ok(), Some(false)); - assert_eq!(c.get("production_s").ok(), Some("false".to_string())); + assert_eq!(c.get("production_s").ok(), Some("false".to_owned())); assert_eq!(c.get("production_s").ok(), Some(0)); assert_eq!(c.get("production_s").ok(), Some(0.0)); } @@ -84,7 +84,7 @@ fn test_get_scalar_path() { assert_eq!(c.get("place.favorite").ok(), Some(false)); assert_eq!( c.get("place.creator.name").ok(), - Some("John Smith".to_string()) + Some("John Smith".to_owned()) ); } @@ -93,10 +93,10 @@ fn test_get_scalar_path_subscript() { let c = make(); assert_eq!(c.get("arr[2]").ok(), Some(3)); - assert_eq!(c.get("items[0].name").ok(), Some("1".to_string())); - assert_eq!(c.get("items[1].name").ok(), Some("2".to_string())); - assert_eq!(c.get("items[-1].name").ok(), Some("2".to_string())); - assert_eq!(c.get("items[-2].name").ok(), Some("1".to_string())); + assert_eq!(c.get("items[0].name").ok(), Some("1".to_owned())); + assert_eq!(c.get("items[1].name").ok(), Some("2".to_owned())); + assert_eq!(c.get("items[-1].name").ok(), Some("2".to_owned())); + assert_eq!(c.get("items[-2].name").ok(), Some("1".to_owned())); } #[test] @@ -107,7 +107,7 @@ fn test_map() { assert_eq!(m.len(), 8); assert_eq!( m["name"].clone().into_string().unwrap(), - "Torre di Pisa".to_string() + "Torre di Pisa".to_owned() ); assert_eq!(m["reviews"].clone().into_int().unwrap(), 3866); } @@ -121,14 +121,14 @@ fn test_map_str() { assert_eq!( m.into_iter().collect::>(), vec![ - ("name".to_string(), "John Smith".to_string()), - ("username".to_string(), "jsmith".to_string()), - ("email".to_string(), "jsmith@localhost".to_string()), + ("name".to_owned(), "John Smith".to_owned()), + ("username".to_owned(), "jsmith".to_owned()), + ("email".to_owned(), "jsmith@localhost".to_owned()), ] ); } else { assert_eq!(m.len(), 3); - assert_eq!(m["name"], "John Smith".to_string()); + assert_eq!(m["name"], "John Smith".to_owned()); } } 
@@ -145,7 +145,7 @@ fn test_map_struct() { assert_eq!(s.place.len(), 8); assert_eq!( s.place["name"].clone().into_string().unwrap(), - "Torre di Pisa".to_string() + "Torre di Pisa".to_owned() ); assert_eq!(s.place["reviews"].clone().into_int().unwrap(), 3866); } @@ -158,7 +158,7 @@ fn test_file_struct() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.place.name, "Torre di Pisa"); assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); @@ -206,7 +206,7 @@ fn test_struct_array() { let s: Settings = c.try_deserialize().unwrap(); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); } #[test] diff --git a/tests/integer_range.rs b/tests/integer_range.rs index e80a2f2f..42292ea0 100644 --- a/tests/integer_range.rs +++ b/tests/integer_range.rs @@ -1,3 +1,5 @@ +#![cfg(feature = "toml")] + use config::Config; #[test] diff --git a/tests/legacy/datetime.rs b/tests/legacy/datetime.rs index 7e63e689..7901d442 100644 --- a/tests/legacy/datetime.rs +++ b/tests/legacy/datetime.rs @@ -1,14 +1,13 @@ #![cfg(all( feature = "toml", feature = "json", - feature = "hjson", feature = "yaml", feature = "ini", feature = "ron", ))] -use self::chrono::{DateTime, TimeZone, Utc}; -use self::config::*; +use chrono::{DateTime, TimeZone, Utc}; +use config::*; fn make() -> Config { Config::default() @@ -35,15 +34,6 @@ fn make() -> Config { FileFormat::Toml, )) .unwrap() - .merge(File::from_str( - r#" - { - "hjson_datetime": "2017-05-10T02:14:53Z" - } - "#, - FileFormat::Hjson, - )) - .unwrap() .merge(File::from_str( r#" ini_datetime = 2017-05-10T02:14:53Z @@ -82,11 +72,6 @@ fn test_datetime_string() { assert_eq!(&date, "2017-06-12T10:58:30Z"); - // HJSON - let date: String = 
s.get("hjson_datetime").unwrap(); - - assert_eq!(&date, "2017-05-10T02:14:53Z"); - // INI let date: String = s.get("ini_datetime").unwrap(); @@ -117,11 +102,6 @@ fn test_datetime() { assert_eq!(date, Utc.ymd(2017, 6, 12).and_hms(10, 58, 30)); - // HJSON - let date: DateTime = s.get("hjson_datetime").unwrap(); - - assert_eq!(date, Utc.ymd(2017, 5, 10).and_hms(2, 14, 53)); - // INI let date: DateTime = s.get("ini_datetime").unwrap(); diff --git a/tests/legacy/errors.rs b/tests/legacy/errors.rs index 191e85eb..ca514e81 100644 --- a/tests/legacy/errors.rs +++ b/tests/legacy/errors.rs @@ -42,7 +42,7 @@ fn test_error_type_detached() { assert!(res.is_err()); assert_eq!( res.unwrap_err().to_string(), - "invalid type: string \"fals\", expected a boolean".to_string() + "invalid type: string \"fals\", expected a boolean".to_owned() ); } @@ -60,7 +60,7 @@ fn test_error_enum_de() { let on_d = on_v.try_deserialize::(); assert_eq!( on_d.unwrap_err().to_string(), - "enum Diode does not have variant constructor on".to_string() + "enum Diode does not have variant constructor on".to_owned() ); let array_v: Value = vec![100, 100].into(); @@ -71,8 +71,8 @@ fn test_error_enum_de() { ); let confused_v: Value = [ - ("Brightness".to_string(), 100.into()), - ("Blinking".to_string(), vec![300, 700].into()), + ("Brightness".to_owned(), 100.into()), + ("Blinking".to_owned(), vec![300, 700].into()), ] .iter() .cloned() diff --git a/tests/legacy/file.rs b/tests/legacy/file.rs index 59006465..a4ef41c7 100644 --- a/tests/legacy/file.rs +++ b/tests/legacy/file.rs @@ -18,11 +18,12 @@ fn test_file_required_not_found() { assert!(res.is_err()); assert_eq!( res.unwrap_err().to_string(), - "configuration file \"tests/NoSettings\" not found".to_string() + "configuration file \"tests/NoSettings\" not found".to_owned() ); } #[test] +#[cfg(feature = "toml")] fn test_file_auto() { let mut c = Config::default(); c.merge(File::with_name("tests/Settings-production")) @@ -40,11 +41,12 @@ fn 
test_file_auto_not_found() { assert!(res.is_err()); assert_eq!( res.unwrap_err().to_string(), - "configuration file \"tests/NoSettings\" not found".to_string() + "configuration file \"tests/NoSettings\" not found".to_owned() ); } #[test] +#[cfg(feature = "json")] fn test_file_ext() { let mut c = Config::default(); c.merge(File::with_name("tests/Settings.json")).unwrap(); diff --git a/tests/legacy/file_json.rs b/tests/legacy/file_json.rs index 72fd5ebb..43c6fdc5 100644 --- a/tests/legacy/file_json.rs +++ b/tests/legacy/file_json.rs @@ -44,7 +44,7 @@ fn test_file() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.place.name, "Torre di Pisa"); assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); @@ -53,15 +53,15 @@ fn test_file() { assert_eq!(s.place.rating, Some(4.5)); assert_eq!(s.place.telephone, None); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); if cfg!(feature = "preserve_order") { assert_eq!( s.place .creator .into_iter() - .collect::>(), + .collect::>(), vec![ - ("name".to_string(), "John Smith".into()), + ("name".to_owned(), "John Smith".into()), ("username".into(), "jsmith".into()), ("email".into(), "jsmith@localhost".into()), ] @@ -69,7 +69,7 @@ fn test_file() { } else { assert_eq!( s.place.creator["name"].clone().into_string().unwrap(), - "John Smith".to_string() + "John Smith".to_owned() ); } } diff --git a/tests/legacy/file_ron.rs b/tests/legacy/file_ron.rs index 7f31c0e9..1f069f32 100644 --- a/tests/legacy/file_ron.rs +++ b/tests/legacy/file_ron.rs @@ -44,7 +44,7 @@ fn test_file() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + 
assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.place.initials, ('T', 'P')); assert_eq!(s.place.name, "Torre di Pisa"); assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); @@ -54,15 +54,15 @@ fn test_file() { assert_eq!(s.place.rating, Some(4.5)); assert_eq!(s.place.telephone, None); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); if cfg!(feature = "preserve_order") { assert_eq!( s.place .creator .into_iter() - .collect::>(), + .collect::>(), vec![ - ("name".to_string(), "John Smith".into()), + ("name".to_owned(), "John Smith".into()), ("username".into(), "jsmith".into()), ("email".into(), "jsmith@localhost".into()), ] @@ -70,7 +70,7 @@ fn test_file() { } else { assert_eq!( s.place.creator["name"].clone().into_string().unwrap(), - "John Smith".to_string() + "John Smith".to_owned() ); } } diff --git a/tests/legacy/file_toml.rs b/tests/legacy/file_toml.rs index 2ecdb01d..a26f25ce 100644 --- a/tests/legacy/file_toml.rs +++ b/tests/legacy/file_toml.rs @@ -51,7 +51,7 @@ fn test_file() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.code, AsciiCode(53)); assert_eq!(s.place.number, PlaceNumber(1)); assert_eq!(s.place.name, "Torre di Pisa"); @@ -62,15 +62,15 @@ fn test_file() { assert_eq!(s.place.rating, Some(4.5)); assert_eq!(s.place.telephone, None); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); if cfg!(feature = "preserve_order") { assert_eq!( s.place .creator .into_iter() - .collect::>(), + .collect::>(), vec![ - ("name".to_string(), "John Smith".into()), + ("name".to_owned(), "John Smith".into()), ("username".into(), "jsmith".into()), ("email".into(), "jsmith@localhost".into()), ] @@ -78,7 +78,7 @@ fn test_file() { } else { 
assert_eq!( s.place.creator["name"].clone().into_string().unwrap(), - "John Smith".to_string() + "John Smith".to_owned() ); } } diff --git a/tests/legacy/file_yaml.rs b/tests/legacy/file_yaml.rs index 9d438a35..baefc224 100644 --- a/tests/legacy/file_yaml.rs +++ b/tests/legacy/file_yaml.rs @@ -44,7 +44,7 @@ fn test_file() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.place.name, "Torre di Pisa"); assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); @@ -53,15 +53,15 @@ fn test_file() { assert_eq!(s.place.rating, Some(4.5)); assert_eq!(s.place.telephone, None); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); if cfg!(feature = "preserve_order") { assert_eq!( s.place .creator .into_iter() - .collect::>(), + .collect::>(), vec![ - ("name".to_string(), "John Smith".into()), + ("name".to_owned(), "John Smith".into()), ("username".into(), "jsmith".into()), ("email".into(), "jsmith@localhost".into()), ] @@ -69,12 +69,13 @@ fn test_file() { } else { assert_eq!( s.place.creator["name"].clone().into_string().unwrap(), - "John Smith".to_string() + "John Smith".to_owned() ); } } #[test] +#[cfg(unix)] fn test_error_parse() { let mut c = Config::default(); let res = c.merge(File::new("tests/Settings-invalid", FileFormat::Yaml)); diff --git a/tests/legacy/get.rs b/tests/legacy/get.rs index 837ad9e1..e073a51b 100644 --- a/tests/legacy/get.rs +++ b/tests/legacy/get.rs @@ -40,7 +40,7 @@ fn test_not_found() { assert!(res.is_err()); assert_eq!( res.unwrap_err().to_string(), - "configuration property \"not_found\" not found".to_string() + "configuration property \"not_found\" not found".to_owned() ); } @@ -57,22 +57,22 @@ fn test_scalar_type_loose() { let c = make(); 
assert_eq!(c.get("debug").ok(), Some(true)); - assert_eq!(c.get("debug").ok(), Some("true".to_string())); + assert_eq!(c.get("debug").ok(), Some("true".to_owned())); assert_eq!(c.get("debug").ok(), Some(1)); assert_eq!(c.get("debug").ok(), Some(1.0)); assert_eq!(c.get("debug_s").ok(), Some(true)); - assert_eq!(c.get("debug_s").ok(), Some("true".to_string())); + assert_eq!(c.get("debug_s").ok(), Some("true".to_owned())); assert_eq!(c.get("debug_s").ok(), Some(1)); assert_eq!(c.get("debug_s").ok(), Some(1.0)); assert_eq!(c.get("production").ok(), Some(false)); - assert_eq!(c.get("production").ok(), Some("false".to_string())); + assert_eq!(c.get("production").ok(), Some("false".to_owned())); assert_eq!(c.get("production").ok(), Some(0)); assert_eq!(c.get("production").ok(), Some(0.0)); assert_eq!(c.get("production_s").ok(), Some(false)); - assert_eq!(c.get("production_s").ok(), Some("false".to_string())); + assert_eq!(c.get("production_s").ok(), Some("false".to_owned())); assert_eq!(c.get("production_s").ok(), Some(0)); assert_eq!(c.get("production_s").ok(), Some(0.0)); } @@ -84,7 +84,7 @@ fn test_get_scalar_path() { assert_eq!(c.get("place.favorite").ok(), Some(false)); assert_eq!( c.get("place.creator.name").ok(), - Some("John Smith".to_string()) + Some("John Smith".to_owned()) ); } @@ -93,10 +93,10 @@ fn test_get_scalar_path_subscript() { let c = make(); assert_eq!(c.get("arr[2]").ok(), Some(3)); - assert_eq!(c.get("items[0].name").ok(), Some("1".to_string())); - assert_eq!(c.get("items[1].name").ok(), Some("2".to_string())); - assert_eq!(c.get("items[-1].name").ok(), Some("2".to_string())); - assert_eq!(c.get("items[-2].name").ok(), Some("1".to_string())); + assert_eq!(c.get("items[0].name").ok(), Some("1".to_owned())); + assert_eq!(c.get("items[1].name").ok(), Some("2".to_owned())); + assert_eq!(c.get("items[-1].name").ok(), Some("2".to_owned())); + assert_eq!(c.get("items[-2].name").ok(), Some("1".to_owned())); } #[test] @@ -107,7 +107,7 @@ fn test_map() { 
assert_eq!(m.len(), 8); assert_eq!( m["name"].clone().into_string().unwrap(), - "Torre di Pisa".to_string() + "Torre di Pisa".to_owned() ); assert_eq!(m["reviews"].clone().into_int().unwrap(), 3866); } @@ -121,14 +121,14 @@ fn test_map_str() { assert_eq!( m.into_iter().collect::>(), vec![ - ("name".to_string(), "John Smith".to_string()), - ("username".to_string(), "jsmith".to_string()), - ("email".to_string(), "jsmith@localhost".to_string()), + ("name".to_owned(), "John Smith".to_owned()), + ("username".to_owned(), "jsmith".to_owned()), + ("email".to_owned(), "jsmith@localhost".to_owned()), ] ); } else { assert_eq!(m.len(), 3); - assert_eq!(m["name"], "John Smith".to_string()); + assert_eq!(m["name"], "John Smith".to_owned()); } } @@ -145,7 +145,7 @@ fn test_map_struct() { assert_eq!(s.place.len(), 8); assert_eq!( s.place["name"].clone().into_string().unwrap(), - "Torre di Pisa".to_string() + "Torre di Pisa".to_owned() ); assert_eq!(s.place["reviews"].clone().into_int().unwrap(), 3866); } @@ -158,7 +158,7 @@ fn test_file_struct() { let s: Settings = c.try_deserialize().unwrap(); assert!(s.debug.approx_eq_ulps(&1.0, 2)); - assert_eq!(s.production, Some("false".to_string())); + assert_eq!(s.production, Some("false".to_owned())); assert_eq!(s.place.name, "Torre di Pisa"); assert!(s.place.longitude.approx_eq_ulps(&43.722_498_5, 2)); assert!(s.place.latitude.approx_eq_ulps(&10.397_052_2, 2)); @@ -206,7 +206,7 @@ fn test_struct_array() { let s: Settings = c.try_deserialize().unwrap(); assert_eq!(s.elements.len(), 10); - assert_eq!(s.elements[3], "4".to_string()); + assert_eq!(s.elements[3], "4".to_owned()); } #[test] diff --git a/tests/legacy/merge.rs b/tests/legacy/merge.rs index 74463d66..fca5e168 100644 --- a/tests/legacy/merge.rs +++ b/tests/legacy/merge.rs @@ -26,15 +26,15 @@ fn test_merge() { assert_eq!( m.into_iter().collect::>(), vec![ - ("name".to_string(), "Somebody New".to_string()), - ("username".to_string(), "jsmith".to_string()), - ("email".to_string(), 
"jsmith@localhost".to_string()), + ("name".to_owned(), "Somebody New".to_owned()), + ("username".to_owned(), "jsmith".to_owned()), + ("email".to_owned(), "jsmith@localhost".to_owned()), ] ); } else { assert_eq!( c.get("place.creator.name").ok(), - Some("Somebody New".to_string()) + Some("Somebody New".to_owned()) ); } } diff --git a/tests/legacy/set.rs b/tests/legacy/set.rs index 169e462a..a16241c1 100644 --- a/tests/legacy/set.rs +++ b/tests/legacy/set.rs @@ -50,16 +50,13 @@ fn test_set_arr_path() { c.set("items[0].name", "Ivan").unwrap(); - assert_eq!(c.get("items[0].name").ok(), Some("Ivan".to_string())); + assert_eq!(c.get("items[0].name").ok(), Some("Ivan".to_owned())); c.set("data[0].things[1].name", "foo").unwrap(); c.set("data[0].things[1].value", 42).unwrap(); c.set("data[1]", 0).unwrap(); - assert_eq!( - c.get("data[0].things[1].name").ok(), - Some("foo".to_string()) - ); + assert_eq!(c.get("data[0].things[1].name").ok(), Some("foo".to_owned())); assert_eq!(c.get("data[0].things[1].value").ok(), Some(42)); assert_eq!(c.get("data[1]").ok(), Some(0)); @@ -68,11 +65,11 @@ fn test_set_arr_path() { c.set("items[0].name", "John").unwrap(); - assert_eq!(c.get("items[0].name").ok(), Some("John".to_string())); + assert_eq!(c.get("items[0].name").ok(), Some("John".to_owned())); c.set("items[2]", "George").unwrap(); - assert_eq!(c.get("items[2]").ok(), Some("George".to_string())); + assert_eq!(c.get("items[2]").ok(), Some("George".to_owned())); } #[cfg(feature = "toml")] diff --git a/tests/log.rs b/tests/log.rs index a468adaa..082c90f4 100644 --- a/tests/log.rs +++ b/tests/log.rs @@ -1,9 +1,7 @@ -extern crate config; -extern crate log; +#![cfg(feature = "json")] #[macro_use] extern crate serde_derive; -extern crate serde_json; use config::*; @@ -37,6 +35,7 @@ fn test_case_sensitivity_log_level_from_str() { } #[test] +#[cfg(feature = "json")] fn test_case_sensitivity_json_from_str() { // to confirm serde_json works as expected let s = r#"{ "log": "error" }"#; diff 
--git a/tests/merge.rs b/tests/merge.rs index 0469064f..8030bec8 100644 --- a/tests/merge.rs +++ b/tests/merge.rs @@ -23,15 +23,15 @@ fn test_merge() { assert_eq!( m.into_iter().collect::>(), vec![ - ("name".to_string(), "Somebody New".to_string()), - ("username".to_string(), "jsmith".to_string()), - ("email".to_string(), "jsmith@localhost".to_string()), + ("name".to_owned(), "Somebody New".to_owned()), + ("username".to_owned(), "jsmith".to_owned()), + ("email".to_owned(), "jsmith@localhost".to_owned()), ] ); } else { assert_eq!( c.get("place.creator.name").ok(), - Some("Somebody New".to_string()) + Some("Somebody New".to_owned()) ); } } diff --git a/tests/ron_enum.rs b/tests/ron_enum.rs index ee0264cf..fa0d96da 100644 --- a/tests/ron_enum.rs +++ b/tests/ron_enum.rs @@ -1,3 +1,5 @@ +#![cfg(feature = "ron")] + use config::{Config, File, FileFormat}; use serde_derive::Deserialize; diff --git a/tests/set.rs b/tests/set.rs index 59b5b849..0a5ac8ed 100644 --- a/tests/set.rs +++ b/tests/set.rs @@ -63,14 +63,14 @@ fn test_set_arr_path() { .build() .unwrap(); - assert_eq!(config.get("items[0].name").ok(), Some("Ivan".to_string())); + assert_eq!(config.get("items[0].name").ok(), Some("Ivan".to_owned())); assert_eq!( config.get("data[0].things[1].name").ok(), - Some("foo".to_string()) + Some("foo".to_owned()) ); assert_eq!(config.get("data[0].things[1].value").ok(), Some(42)); assert_eq!(config.get("data[1]").ok(), Some(0)); - assert_eq!(config.get("items[2]").ok(), Some("George".to_string())); + assert_eq!(config.get("items[2]").ok(), Some("George".to_owned())); } #[cfg(feature = "toml")] diff --git a/tests/unsigned_int.rs b/tests/unsigned_int.rs index e870c8a1..0022314a 100644 --- a/tests/unsigned_int.rs +++ b/tests/unsigned_int.rs @@ -1,13 +1,11 @@ #![cfg(feature = "preserve_order")] -extern crate indexmap; - -#[derive(serde::Deserialize, Eq, PartialEq, Debug)] +#[derive(serde_derive::Deserialize, Eq, PartialEq, Debug)] struct Container { inner: T, } 
-#[derive(serde::Deserialize, Eq, PartialEq, Debug)] +#[derive(serde_derive::Deserialize, Eq, PartialEq, Debug)] struct Unsigned { unsigned: u16, } @@ -22,7 +20,7 @@ impl From for config::ValueKind { fn from(unsigned: Unsigned) -> Self { let mut properties = indexmap::IndexMap::new(); properties.insert( - "unsigned".to_string(), + "unsigned".to_owned(), config::Value::from(unsigned.unsigned), ); diff --git a/tests/unsigned_int_hm.rs b/tests/unsigned_int_hm.rs index ab2a60cc..87d9a99a 100644 --- a/tests/unsigned_int_hm.rs +++ b/tests/unsigned_int_hm.rs @@ -1,11 +1,11 @@ #![cfg(not(feature = "preserve_order"))] -#[derive(serde::Deserialize, Eq, PartialEq, Debug)] +#[derive(serde_derive::Deserialize, Eq, PartialEq, Debug)] struct Container { inner: T, } -#[derive(serde::Deserialize, Eq, PartialEq, Debug)] +#[derive(serde_derive::Deserialize, Eq, PartialEq, Debug)] struct Unsigned { unsigned: u16, } diff --git a/tests/weird_keys.rs b/tests/weird_keys.rs index c997fe05..ef1749d1 100644 --- a/tests/weird_keys.rs +++ b/tests/weird_keys.rs @@ -4,6 +4,9 @@ // Please don't be offended! // +#![cfg(feature = "json")] +#![cfg(feature = "toml")] + use serde_derive::{Deserialize, Serialize}; use config::{File, FileFormat};