diff --git a/.github/workflows/count_loc.yml b/.github/workflows/count_loc.yml new file mode 100644 index 0000000..65808f7 --- /dev/null +++ b/.github/workflows/count_loc.yml @@ -0,0 +1,25 @@ +name: Count lines of code for the project, and upload to the badge store + +on: + push: + branches: + - 'master' + +jobs: + count-loc-and-upload: + runs-on: ubuntu-latest + permissions: + contents: read + steps: + - uses: actions/checkout@v3 + - id: loc + name: Count lines of code + uses: Sh1nku/count-loc-action@v1 + with: + excluded: "*.xml,*.toml,*.yml" + - uses: Sh1nku/badgestore-update-badge-action@v1 + name: Update badge + id: badge + with: + right-label: ${{ steps.loc.outputs.Total_code_abbreviated }} + read-write-key: ${{ secrets.LOC_COUNT_BADGE_RW_KEY }} \ No newline at end of file diff --git a/.github/workflows/python_publish.yml b/.github/workflows/python_publish.yml new file mode 100644 index 0000000..b3f86d7 --- /dev/null +++ b/.github/workflows/python_publish.yml @@ -0,0 +1,115 @@ +# This file is autogenerated by maturin v1.1.0 +# To update, run +# +# maturin generate-ci github -m ./wrappers/python/Cargo.toml +# +name: Create python wheels + +on: + push: + tags: + - '*' + +permissions: + contents: read + +jobs: + linux: + runs-on: ubuntu-latest + strategy: + matrix: + target: [x86_64, x86, aarch64, armv7, s390x, ppc64le] + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Build wheels + uses: PyO3/maturin-action@v1 + with: + target: ${{ matrix.target }} + args: --release --out dist --find-interpreter --manifest-path ./wrappers/python/Cargo.toml + sccache: 'true' + manylinux: auto + - name: Upload wheels + uses: actions/upload-artifact@v3 + with: + name: wheels + path: dist + + windows: + runs-on: windows-latest + strategy: + matrix: + target: [x64, x86] + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + architecture: ${{ matrix.target }} + - name: Build wheels + uses: PyO3/maturin-action@v1 + with: + target: ${{ matrix.target }} + args: --release --out dist --find-interpreter --manifest-path ./wrappers/python/Cargo.toml + sccache: 'true' + - name: Upload wheels + uses: actions/upload-artifact@v3 + with: + name: wheels + path: dist + + macos: + runs-on: macos-latest + strategy: + matrix: + target: [x86_64, aarch64] + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Build wheels + uses: PyO3/maturin-action@v1 + with: + target: ${{ matrix.target }} + args: --release --out dist --find-interpreter --manifest-path ./wrappers/python/Cargo.toml + sccache: 'true' + - name: Upload wheels + uses: actions/upload-artifact@v3 + with: + name: wheels + path: dist + + sdist: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Build sdist + uses: PyO3/maturin-action@v1 + with: + command: sdist + args: --out dist --manifest-path ./wrappers/python/Cargo.toml + - name: Upload sdist + uses: actions/upload-artifact@v3 + with: + name: wheels + path: dist + + release: + name: Release + runs-on: ubuntu-latest + if: "startsWith(github.ref, 'refs/tags/')" + needs: [linux, windows, macos, sdist] + steps: + - uses: actions/download-artifact@v3 + with: + name: wheels + - name: Publish to PyPI + uses: PyO3/maturin-action@v1 + env: + MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }} + with: + command: upload + args: --skip-existing * diff --git a/.github/workflows/test_python.yml b/.github/workflows/test_python.yml 
new file mode 100644 index 0000000..6425980 --- /dev/null +++ b/.github/workflows/test_python.yml @@ -0,0 +1,42 @@ +name: Test python wrapper +on: [push] +jobs: + checks: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [ "3.8", "3.11" ] + docker-version: [ "8_11", "9_3" ] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Update Rust + run: | + rustup override set stable + rustup update stable + - name: Set up venv + working-directory: ./wrappers/python + run: | + python -m venv venv + - name: Install dependencies + run: | + source venv/bin/activate + pip3 install -r requirements-dev.txt + working-directory: ./wrappers/python + - name: Install module + run: | + source venv/bin/activate + maturin develop + working-directory: ./wrappers/python + - name: Start docker containers + run: docker-compose up -d + working-directory: ./docker/${{ matrix.docker-version }} + - name: Run tests + working-directory: ./wrappers/python + run: | + source venv/bin/activate + pytest \ No newline at end of file diff --git a/.github/workflows/test_rust.yml b/.github/workflows/test_rust.yml new file mode 100644 index 0000000..0813cc6 --- /dev/null +++ b/.github/workflows/test_rust.yml @@ -0,0 +1,23 @@ +name: Unit tests, linting, and formatting +on: [push] +jobs: + checks: + runs-on: ubuntu-latest + strategy: + matrix: + docker-version: [ "8_11", "9_3" ] + steps: + - uses: actions/checkout@v3 + - name: Update Rust + run: | + rustup override set stable + rustup update stable + - name: Start docker containers + run: docker-compose up -d + working-directory: ./docker/${{ matrix.docker-version }} + - name: Run tests + run: cargo test --all-features + - name: Lint + run: cargo clippy + - name: Check formatting + run: cargo fmt --check \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4fffb2f --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/target +/Cargo.lock diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..87d67a7 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,6 @@ +[workspace] + +members = [ + "framework", + "wrappers/python", +] \ No newline at end of file diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 0000000..892cc89 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 Andreas H Johansen + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
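
The two test workflows above start a ZooKeeper/Solr stack with docker-compose (the `docker/8_11` and `docker/9_3` directories added later in this diff) and then run `cargo test --all-features` and `pytest` against it. The project's actual test suite is not part of this excerpt; the sketch below only illustrates the kind of integration test those workflows imply, assuming a Solr node on `http://localhost:8983` secured with the `solr`/`SolrRocks` credentials from `security.json`:

```rust
use solrstice::clients::async_cloud_client::AsyncSolrCloudClient;
use solrstice::hosts::solr_server_host::SolrSingleServerHost;
use solrstice::models::auth::SolrBasicAuth;
use solrstice::models::context::SolrServerContext;
use solrstice::models::error::SolrError;

#[tokio::test]
async fn solr_is_reachable() -> Result<(), SolrError> {
    // Connect to the Solr node started by docker-compose, authenticating with
    // the credentials baked into security.json (solr / SolrRocks).
    let context = SolrServerContext::new(SolrSingleServerHost::new("http://localhost:8983"))
        .with_auth(SolrBasicAuth::new("solr", Some("SolrRocks")));
    let client = AsyncSolrCloudClient::new(context);

    // Listing collections is a cheap round-trip that proves auth and routing work.
    let collections = client.get_collections().await?;
    println!("collections: {:?}", collections);
    Ok(())
}
```
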
diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 0000000..0dc6b72 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Andreas H Johansen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..d85495f --- /dev/null +++ b/README.md @@ -0,0 +1,116 @@ +# Solrstice: A Solr 8+ Client for Rust and Python + +![Lines of code](https://api.badgestore.dev/badge/ef573e3335d97409/local?style=flat-square) + +Solrstice is a SolrCloud aware client library written in rust. +It also provides a wrapper to python. + +Use the [documentation](https://docs.rs/solrstice) for more information. +## Features +* Config API +* Collection API +* Alias API +* Select Documents + * Grouping Component Query +* Indexing Documents +* Deleting Documents +## Examples +Upload a config, create a collection, index a document, select it, and delete it. +### Rust +```rust +use serde::{Deserialize, Serialize}; +use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +use solrstice::hosts::solr_server_host::SolrSingleServerHost; +use solrstice::models::auth::SolrBasicAuth; +use solrstice::models::context::SolrServerContext; +use solrstice::models::error::SolrError; +use solrstice::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; +use solrstice::queries::select::SelectQueryBuilder; +use std::path::Path; + +#[derive(Serialize, Deserialize, Debug)] +struct TestData { + id: String, +} + +#[tokio::test] +pub async fn example() -> Result<(), SolrError> { + + //Create a solr client. You can also use a list of zookeeper hosts instead of a single server. + let context = SolrServerContext::new(SolrSingleServerHost::new("http://localhost:8983")) + .with_auth(SolrBasicAuth::new("solr", Some("SolrRocks"))); + let client = AsyncSolrCloudClient::new(context); + + // Upload config + client + .upload_config("example_config", Path::new("/path/to/config")) + .await?; + + // Create collection + client + .create_collection("example_collection", "example_config", 1, 1) + .await?; + + // Index document + let docs = vec![TestData { + id: "example_document".to_string(), + }]; + client + .index( + &UpdateQueryBuilder::new(), + "example_collection", + docs.as_slice(), + ) + .await?; + + // Search and retrieve the document + let docs = client + .select( + &SelectQueryBuilder::new().fq(&["id:example_document"]), + "example_collection", + ) + .await? + .get_response() + .ok_or("No response provided")? 
+ .get_docs::()?; + + // Delete the document + client + .delete( + &DeleteQueryBuilder::new().ids(&["example_document"]), + "example_collection", + ) + .await?; + Ok(()) +} +``` +### Python +```python +import asyncio +from solrstice.clients import AsyncSolrCloudClient +from solrstice.hosts import SolrSingleServerHost, SolrServerContext +from solrstice.auth import SolrBasicAuth +from solrstice.queries import UpdateQueryBuilder, SelectQueryBuilder, DeleteQueryBuilder + +# A SolrServerContext specifies how the library should interact with Solr +context = SolrServerContext(SolrSingleServerHost('localhost:8983'), SolrBasicAuth('solr', 'SolrRocks')) +client = AsyncSolrCloudClient(context) + +async def main(): + # Create config and collection + await client.upload_config('example_config', 'path/to/config') + await client.create_collection('example_collection', 'example_config', shards=1, replication_factor=1) + + # Index a document + await client.index(UpdateQueryBuilder(), 'example_collection', [{'id': 'example_document', 'title': 'Example document'}]) + + # Search for the document + response = await client.select(SelectQueryBuilder(fq=['title:Example document']), 'example_collection') + docs = response.get_response().docs + + # Delete the document + await client.delete(DeleteQueryBuilder(ids=['example_document']), 'example_collection') + + +asyncio.run(main()) +``` \ No newline at end of file diff --git a/docker/8_11/Dockerfile b/docker/8_11/Dockerfile new file mode 100644 index 0000000..bfde7a8 --- /dev/null +++ b/docker/8_11/Dockerfile @@ -0,0 +1,5 @@ +FROM solr:8.11.1 + +COPY security.json security.json +COPY solr-security.sh /usr/bin/solr-security.sh +ENTRYPOINT ["/usr/bin/solr-security.sh"] \ No newline at end of file diff --git a/docker/8_11/docker-compose.yml b/docker/8_11/docker-compose.yml new file mode 100644 index 0000000..d14d81b --- /dev/null +++ b/docker/8_11/docker-compose.yml @@ -0,0 +1,35 @@ +version: '3' +services: + zoo1: + image: zookeeper:3.4 + hostname: zoo1 + volumes: + - 'zoo1_data:/data' + environment: + ZOO_MY_ID: 1 + ZOO_PORT: 2181 + ZOO_SERVERS: 'server.1=0.0.0.0:2888:3888' + ports: + - "2181:2181" + restart: unless-stopped + solr1: + build: + context: . 
+ hostname: solr1 + ports: + - "8983:8983" + volumes: + - 'solr1_varsolr:/var/solr' + environment: + ZK_HOST: 'zoo1' + SOLR_JAVA_MEM: "-Xms1g -Xmx1g" + restart: unless-stopped + speedbump: + image: kffl/speedbump:latest + ports: + - "8984:8984" + command: --latency 2s --port 8984 solr1:8983 + restart: unless-stopped +volumes: + zoo1_data: + solr1_varsolr: diff --git a/docker/8_11/security.json b/docker/8_11/security.json new file mode 100644 index 0000000..0b97863 --- /dev/null +++ b/docker/8_11/security.json @@ -0,0 +1,13 @@ +{ + "authentication":{ + "blockUnknown": true, + "class":"solr.BasicAuthPlugin", + "credentials":{"solr":"IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c="} + }, + "authorization":{ + "class":"solr.RuleBasedAuthorizationPlugin", + "permissions":[{"name":"security-edit", + "role":"admin"}], + "user-role":{"solr":"admin"} + } +} \ No newline at end of file diff --git a/docker/8_11/solr-security.sh b/docker/8_11/solr-security.sh new file mode 100755 index 0000000..f43acb7 --- /dev/null +++ b/docker/8_11/solr-security.sh @@ -0,0 +1,3 @@ +#!/bin/sh +solr zk cp /opt/solr-8.11.1/security.json zk:security.json -z zoo1:2181 +exec /opt/docker-solr/scripts/docker-entrypoint.sh -f "$@" \ No newline at end of file diff --git a/docker/9_3/Dockerfile b/docker/9_3/Dockerfile new file mode 100644 index 0000000..4592ec4 --- /dev/null +++ b/docker/9_3/Dockerfile @@ -0,0 +1,5 @@ +FROM solr:9.3.0 + +COPY security.json security.json +COPY solr-security.sh /usr/bin/solr-security.sh +ENTRYPOINT ["/usr/bin/solr-security.sh"] \ No newline at end of file diff --git a/docker/9_3/docker-compose.yml b/docker/9_3/docker-compose.yml new file mode 100644 index 0000000..d14d81b --- /dev/null +++ b/docker/9_3/docker-compose.yml @@ -0,0 +1,35 @@ +version: '3' +services: + zoo1: + image: zookeeper:3.4 + hostname: zoo1 + volumes: + - 'zoo1_data:/data' + environment: + ZOO_MY_ID: 1 + ZOO_PORT: 2181 + ZOO_SERVERS: 'server.1=0.0.0.0:2888:3888' + ports: + - "2181:2181" + restart: unless-stopped + solr1: + build: + context: . 
+ hostname: solr1 + ports: + - "8983:8983" + volumes: + - 'solr1_varsolr:/var/solr' + environment: + ZK_HOST: 'zoo1' + SOLR_JAVA_MEM: "-Xms1g -Xmx1g" + restart: unless-stopped + speedbump: + image: kffl/speedbump:latest + ports: + - "8984:8984" + command: --latency 2s --port 8984 solr1:8983 + restart: unless-stopped +volumes: + zoo1_data: + solr1_varsolr: diff --git a/docker/9_3/security.json b/docker/9_3/security.json new file mode 100644 index 0000000..0b97863 --- /dev/null +++ b/docker/9_3/security.json @@ -0,0 +1,13 @@ +{ + "authentication":{ + "blockUnknown": true, + "class":"solr.BasicAuthPlugin", + "credentials":{"solr":"IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c="} + }, + "authorization":{ + "class":"solr.RuleBasedAuthorizationPlugin", + "permissions":[{"name":"security-edit", + "role":"admin"}], + "user-role":{"solr":"admin"} + } +} \ No newline at end of file diff --git a/docker/9_3/solr-security.sh b/docker/9_3/solr-security.sh new file mode 100755 index 0000000..0f982a0 --- /dev/null +++ b/docker/9_3/solr-security.sh @@ -0,0 +1,3 @@ +#!/bin/sh +solr zk cp /opt/solr-9.3.0/security.json zk:security.json -z zoo1:2181 +exec /opt/solr-9.3.0/docker/scripts/docker-entrypoint.sh -f "$@" \ No newline at end of file diff --git a/framework/Cargo.toml b/framework/Cargo.toml new file mode 100644 index 0000000..8b4e785 --- /dev/null +++ b/framework/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "solrstice" +description = "A Solr 8+ client" +version = "0.1.0" +edition = "2021" +license = "MIT OR Apache-2.0" +keywords = ["solr", "search"] +categories = ["api-bindings"] +readme = "README.md" +repository = "https://github.com/Sh1nku/solrstice" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +serde_json = { version = "1", features = ["raw_value"]} +serde = { version = "1", features = ["derive"]} +reqwest = { version = "0.11", features = ["serde_json", "json"]} +async-trait = "0.1" +thiserror = { version = "1", features = []} +zookeeper-async = "4.2" +fastrand = "2.0" +log = "0.4" +zip = { version = "0.6", default-features = false } +tempfile = "3.3" +walkdir = "2.3" +tokio = { version = "1.25", optional = true } +lazy_static = {version = "1.4", optional = true} +dyn-clone = "1.0" +[features] +blocking = ["tokio", "lazy_static"] + +[dev-dependencies] +tokio = { features = ["macros", "rt", "rt-multi-thread"], version = "1.25.0"} +dotenv = "0.15.0" \ No newline at end of file diff --git a/framework/LICENSE-APACHE b/framework/LICENSE-APACHE new file mode 100644 index 0000000..892cc89 --- /dev/null +++ b/framework/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 Andreas H Johansen + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
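
The framework crate's README below repeats the end-to-end example from the top-level README. Because `get_docs` is generic over the document type, the select step deserializes straight into a user-defined struct; a minimal standalone sketch of just that step, with the type parameter spelled out explicitly (collection name, filter query, and struct are illustrative, not taken from the repository):

```rust
use serde::{Deserialize, Serialize};
use solrstice::clients::async_cloud_client::AsyncSolrCloudClient;
use solrstice::hosts::solr_server_host::SolrSingleServerHost;
use solrstice::models::context::SolrServerContextBuilder;
use solrstice::models::error::SolrError;
use solrstice::queries::select::SelectQueryBuilder;

#[derive(Serialize, Deserialize, Debug)]
struct TestData {
    id: String,
}

async fn fetch_docs() -> Result<Vec<TestData>, SolrError> {
    let context =
        SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build();
    let client = AsyncSolrCloudClient::new(context);

    let docs = client
        .select(
            &SelectQueryBuilder::new().fq(&["id:example_document"]),
            "example_collection",
        )
        .await?
        // `get_response` returns an Option; a missing response block is treated as an error here.
        .get_response()
        .ok_or("No response provided")?
        // The type parameter tells the client what to deserialize each document into.
        .get_docs::<TestData>()?;
    Ok(docs)
}
```
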
diff --git a/framework/LICENSE-MIT b/framework/LICENSE-MIT new file mode 100644 index 0000000..0dc6b72 --- /dev/null +++ b/framework/LICENSE-MIT @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Andreas H Johansen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/framework/README.md b/framework/README.md new file mode 100644 index 0000000..15d246e --- /dev/null +++ b/framework/README.md @@ -0,0 +1,83 @@ +# Solrstice: A Solr 8+ Client for Rust and Python + +![Lines of code](https://api.badgestore.dev/badge/ef573e3335d97409/local?style=flat-square) + +Solrstice is a SolrCloud aware client library written in rust. +It also provides a wrapper to python. +## Features +* Config API +* Collection API +* Alias API +* Select Documents + * Grouping Component Query +* Indexing Documents +* Deleting Documents +## Examples +Upload a config, create a collection, index a document, select it, and delete it. +```rust +use serde::{Deserialize, Serialize}; +use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +use solrstice::hosts::solr_server_host::SolrSingleServerHost; +use solrstice::models::auth::SolrBasicAuth; +use solrstice::models::context::SolrServerContext; +use solrstice::models::error::SolrError; +use solrstice::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; +use solrstice::queries::select::SelectQueryBuilder; +use std::path::Path; + +#[derive(Serialize, Deserialize, Debug)] +struct TestData { + id: String, +} + +#[tokio::test] +pub async fn example() -> Result<(), SolrError> { + + //Create a solr client. You can also use a list of zookeeper hosts instead of a single server. + let context = SolrServerContext::new(SolrSingleServerHost::new("http://localhost:8983")) + .with_auth(SolrBasicAuth::new("solr", Some("SolrRocks"))); + let client = AsyncSolrCloudClient::new(context); + + // Upload config + client + .upload_config("example_config", Path::new("/path/to/config")) + .await?; + + // Create collection + client + .create_collection("example_collection", "example_config", 1, 1) + .await?; + + // Index document + let docs = vec![TestData { + id: "example_document".to_string(), + }]; + client + .index( + &UpdateQueryBuilder::new(), + "example_collection", + docs.as_slice(), + ) + .await?; + + // Search and retrieve the document + let docs = client + .select( + &SelectQueryBuilder::new().fq(&["id:example_document"]), + "example_collection", + ) + .await? + .get_response() + .ok_or("No response provided")? 
+ .get_docs::()?; + + // Delete the document + client + .delete( + &DeleteQueryBuilder::new().ids(&["example_document"]), + "example_collection", + ) + .await?; + Ok(()) +} +``` \ No newline at end of file diff --git a/framework/src/clients/async_cloud_client.rs b/framework/src/clients/async_cloud_client.rs new file mode 100644 index 0000000..bcb1342 --- /dev/null +++ b/framework/src/clients/async_cloud_client.rs @@ -0,0 +1,332 @@ +use crate::models::context::SolrServerContext; +use crate::models::error::SolrError; +use crate::models::response::SolrResponse; +use crate::queries::alias::{alias_exists, create_alias, delete_alias, get_aliases}; +use crate::queries::collection::{ + collection_exists, create_collection, delete_collection, get_collections, +}; +use crate::queries::config::{config_exists, delete_config, get_configs, upload_config}; +use crate::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; +use crate::queries::select::SelectQueryBuilder; +use serde::Serialize; +use std::collections::HashMap; +use std::path::Path; + +/// Async client for SolrCloud +/// # Examples +/// ```rust +/// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +/// use solrstice::hosts::solr_server_host::SolrSingleServerHost; +/// use solrstice::models::context::SolrServerContextBuilder; +/// +/// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); +/// let client = AsyncSolrCloudClient::new(context); +/// ``` +#[derive(Clone)] +pub struct AsyncSolrCloudClient { + /// The solr server context used to specify how to connect to Solr + pub context: SolrServerContext, +} + +impl AsyncSolrCloudClient { + /// Create a new instance of AsyncSolrCloudClient + /// # Examples + /// ```rust + /// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// use solrstice::models::context::SolrServerContextBuilder; + /// + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// ``` + pub fn new(context: SolrServerContext) -> AsyncSolrCloudClient { + AsyncSolrCloudClient { context } + } + + /// Upload a config to SolrCloud + /// # Examples + /// ```no_run + /// # use std::path::Path; + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// client.upload_config("config_name", Path::new("/path/to/config")).await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn upload_config(&self, name: &str, path: &Path) -> Result<(), SolrError> { + upload_config(&self.context, name, path).await + } + + /// Get the configs existing in SolrCloud + /// # Examples + /// ```no_run + /// # use std::path::Path; + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = 
AsyncSolrCloudClient::new(context); + /// let configs: Vec = client.get_configs().await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn get_configs(&self) -> Result, SolrError> { + get_configs(&self.context).await + } + + /// Check if a config exists in SolrCloud + /// # Examples + /// ```no_run + /// # use std::path::Path; + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let exists: bool = client.config_exists("config_name").await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn config_exists(&self, name: &str) -> Result { + config_exists(&self.context, name).await + } + + /// Delete a config from SolrCloud + /// # Examples + /// ```no_run + /// # use std::path::Path; + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// client.delete_config("config_name").await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn delete_config(&self, name: &str) -> Result<(), SolrError> { + delete_config(&self.context, name).await + } + + /// Create a collection in SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// client.create_collection("collection_name", "config_name", 1, 1).await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn create_collection( + &self, + name: &str, + config: &str, + shards: usize, + replication_factor: usize, + ) -> Result<(), SolrError> { + create_collection(&self.context, name, config, shards, replication_factor).await + } + + /// Get collections from SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let collections: Vec = client.get_collections().await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn get_collections(&self) -> Result, SolrError> { + get_collections(&self.context).await + } + + /// Check if a collection exists in SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> 
Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let exists: bool = client.collection_exists("collection_name").await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn collection_exists(&self, name: &str) -> Result { + collection_exists(&self.context, name).await + } + + /// Delete a collection from SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// client.delete_collection("collection_name").await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn delete_collection(&self, name: &str) -> Result<(), SolrError> { + delete_collection(&self.context, name).await + } + + /// Create an alias in SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// client.create_alias("alias_name", &["collection1", "collection2"]).await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn create_alias(&self, alias: &str, collections: &[&str]) -> Result<(), SolrError> { + create_alias(&self.context, alias, collections).await + } + + /// Get aliases from SolrCloud + /// # Examples + /// ```no_run + /// # use std::collections::HashMap; + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let aliases: HashMap> = client.get_aliases().await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn get_aliases(&self) -> Result>, SolrError> { + get_aliases(&self.context).await + } + + /// Check if an alias exists in SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let exists: bool = client.alias_exists("alias_name").await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn alias_exists(&self, name: &str) -> Result { + alias_exists(&self.context, name).await + } + + /// Delete an alias from SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use 
solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// client.delete_alias("alias_name").await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn delete_alias(&self, name: &str) -> Result<(), SolrError> { + delete_alias(&self.context, name).await + } + + /// Index some data into SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::index::UpdateQueryBuilder; + /// # use serde::Serialize; + /// # async fn run() -> Result<(), Box> { + /// #[derive(Serialize)] + /// struct Data {id: String} + /// + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let response = client.index(&UpdateQueryBuilder::new(), "collection_name", &[Data {id: "test".to_string()}]).await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn index( + &self, + builder: &UpdateQueryBuilder, + collection: &str, + data: &[T], + ) -> Result { + builder.execute(&self.context, collection, data).await + } + + /// Select some data from SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let response = client.select(&SelectQueryBuilder::new().fq(&["age:[* TO *]"]), "collection_name").await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn select( + &self, + builder: &SelectQueryBuilder, + collection: &str, + ) -> Result { + builder.execute(&self.context, collection).await + } + + /// Delete some data from SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::index::DeleteQueryBuilder; + /// # use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let response = client.delete(&DeleteQueryBuilder::new().ids(&["document1"]).queries(&["age:[* TO *]"]), "collection_name").await?; + /// # Ok(()) + /// # } + /// ``` + pub async fn delete( + &self, + builder: &DeleteQueryBuilder, + collection: &str, + ) -> Result { + builder.execute(&self.context, collection).await + } +} diff --git a/framework/src/clients/blocking_cloud_client.rs b/framework/src/clients/blocking_cloud_client.rs new file mode 100644 index 0000000..68602c4 --- /dev/null +++ b/framework/src/clients/blocking_cloud_client.rs @@ -0,0 +1,338 @@ +use crate::models::context::SolrServerContext; +use 
crate::models::error::SolrError; +use crate::models::response::SolrResponse; +use crate::queries::alias::{ + alias_exists_blocking, create_alias_blocking, delete_alias_blocking, get_aliases_blocking, +}; +use crate::queries::collection::{ + collection_exists_blocking, create_collection_blocking, delete_collection_blocking, + get_collections_blocking, +}; +use crate::queries::config::{ + config_exists_blocking, delete_config_blocking, get_configs_blocking, upload_config_blocking, +}; +use crate::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; +use crate::queries::select::SelectQueryBuilder; +use serde::Serialize; +use std::collections::HashMap; +use std::path::Path; + +/// A blocking client for SolrCloud. +/// # Examples +/// ```rust +/// use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; +/// use solrstice::hosts::solr_server_host::SolrSingleServerHost; +/// use solrstice::models::context::SolrServerContextBuilder; +/// +/// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); +/// let client = BlockingSolrCloudClient::new(context); +/// ``` +#[derive(Clone)] +pub struct BlockingSolrCloudClient { + /// The solr server context used to specify how to connect to Solr + pub context: SolrServerContext, +} + +impl BlockingSolrCloudClient { + /// Create a new instance of BlockingSolrCloudClient + /// # Examples + /// ```rust + /// use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// use solrstice::models::context::SolrServerContextBuilder; + /// + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// ``` + pub fn new(context: SolrServerContext) -> BlockingSolrCloudClient { + BlockingSolrCloudClient { context } + } + + /// Upload a config to SolrCloud + /// # Examples + /// ```no_run + /// # use std::path::Path; + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// client.upload_config("config_name", Path::new("/path/to/config"))?; + /// # Ok(()) + /// # } + /// ``` + pub fn upload_config(&self, name: &str, path: &Path) -> Result<(), SolrError> { + upload_config_blocking(&self.context, name, path) + } + + /// Get the configs existing in SolrCloud + /// # Examples + /// ```no_run + /// # use std::path::Path; + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// let configs: Vec = client.get_configs()?; + /// # Ok(()) + /// # } + /// ``` + pub fn get_configs(&self) -> Result, SolrError> { + get_configs_blocking(&self.context) + } + + /// Check if a config exists in SolrCloud + /// # Examples + /// ```no_run + /// # use std::path::Path; + /// # use 
solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// let exists: bool = client.config_exists("config_name")?; + /// # Ok(()) + /// # } + /// ``` + pub fn config_exists(&self, name: &str) -> Result { + config_exists_blocking(&self.context, name) + } + + /// Delete a config from SolrCloud + /// # Examples + /// ```no_run + /// # use std::path::Path; + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// client.delete_config("config_name")?; + /// # Ok(()) + /// # } + /// ``` + pub fn delete_config(&self, name: &str) -> Result<(), SolrError> { + delete_config_blocking(&self.context, name) + } + + /// Create a collection in SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// client.create_collection("collection_name", "config_name", 1, 1)?; + /// # Ok(()) + /// # } + /// ``` + pub fn create_collection( + &self, + name: &str, + config: &str, + shards: usize, + replication_factor: usize, + ) -> Result<(), SolrError> { + create_collection_blocking(&self.context, name, config, shards, replication_factor) + } + + /// Get collections from SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// let collections: Vec = client.get_collections()?; + /// # Ok(()) + /// # } + /// ``` + pub fn get_collections(&self) -> Result, SolrError> { + get_collections_blocking(&self.context) + } + + /// Check if a collection exists in SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// let exists: bool = client.collection_exists("collection_name")?; + /// # Ok(()) + /// # } + /// ``` + 
pub fn collection_exists(&self, name: &str) -> Result { + collection_exists_blocking(&self.context, name) + } + + /// Delete a collection from SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// client.delete_collection("collection_name")?; + /// # Ok(()) + /// # } + /// ``` + pub fn delete_collection(&self, name: &str) -> Result<(), SolrError> { + delete_collection_blocking(&self.context, name) + } + + /// Create an alias in SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// client.create_alias("alias_name", &["collection1", "collection2"])?; + /// # Ok(()) + /// # } + /// ``` + pub fn create_alias(&self, alias: &str, collections: &[&str]) -> Result<(), SolrError> { + create_alias_blocking(&self.context, alias, collections) + } + + /// Get aliases from SolrCloud + /// # Examples + /// ```no_run + /// # use std::collections::HashMap; + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// let aliases: HashMap> = client.get_aliases()?; + /// # Ok(()) + /// # } + /// ``` + pub fn get_aliases(&self) -> Result>, SolrError> { + get_aliases_blocking(&self.context) + } + + /// Check if an alias exists in SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// let exists: bool = client.alias_exists("alias_name")?; + /// # Ok(()) + /// # } + /// ``` + pub fn alias_exists(&self, name: &str) -> Result { + alias_exists_blocking(&self.context, name) + } + + /// Delete an alias from SolrCloud + /// # Examples + /// ```no_run + /// use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// client.delete_alias("alias_name")?; + /// # Ok(()) + /// # } + 
/// ``` + pub fn delete_alias(&self, name: &str) -> Result<(), SolrError> { + delete_alias_blocking(&self.context, name) + } + + /// Index some data into SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::index::UpdateQueryBuilder; + /// # use serde::Serialize; + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # async fn run() -> Result<(), Box> { + /// #[derive(Serialize)] + /// struct Data {id: String} + /// + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// let response = client.index(&UpdateQueryBuilder::new(), "collection_name", &[Data {id: "test".to_string()}])?; + /// # Ok(()) + /// # } + /// ``` + pub fn index( + &self, + builder: &UpdateQueryBuilder, + collection: &str, + data: &[T], + ) -> Result { + builder.execute_blocking(&self.context, collection, data) + } + + /// Select some data from SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::select::SelectQueryBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// let response = client.select(&SelectQueryBuilder::new().fq(&["age:[* TO *]"]), "collection_name")?; + /// # Ok(()) + /// # } + /// ``` + pub fn select( + &self, + builder: &SelectQueryBuilder, + collection: &str, + ) -> Result { + builder.execute_blocking(&self.context, collection) + } + + /// Delete some data from SolrCloud + /// # Examples + /// ```no_run + /// # use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::index::DeleteQueryBuilder; + /// # use solrstice::queries::select::SelectQueryBuilder; + /// # fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = BlockingSolrCloudClient::new(context); + /// let response = client.delete(&DeleteQueryBuilder::new().ids(&["document1"]).queries(&["age:[* TO *]"]), "collection_name")?; + /// # Ok(()) + /// # } + /// ``` + pub fn delete( + &self, + builder: &DeleteQueryBuilder, + collection: &str, + ) -> Result { + builder.execute_blocking(&self.context, collection) + } +} diff --git a/framework/src/clients/mod.rs b/framework/src/clients/mod.rs new file mode 100644 index 0000000..5d3cdce --- /dev/null +++ b/framework/src/clients/mod.rs @@ -0,0 +1,26 @@ +//! Clients for interacting with Solr. +//! # Examples +//! ## Async client for SolrCloud +//! ```rust +//! use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +//! use solrstice::hosts::solr_server_host::SolrSingleServerHost; +//! use solrstice::models::context::SolrServerContextBuilder; +//! +//! let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); +//! let client = AsyncSolrCloudClient::new(context); +//! 
``` +//! ## Blocking client for SolrCloud +//! ```rust +//! use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; +//! use solrstice::hosts::solr_server_host::SolrSingleServerHost; +//! use solrstice::models::context::SolrServerContextBuilder; +//! +//! let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); +//! let client = BlockingSolrCloudClient::new(context); +//! ``` + +/// Client for interacting asynchronously with SolrCloud. +pub mod async_cloud_client; +/// Blocking client for interacting with SolrCloud. +#[cfg(feature = "blocking")] +pub mod blocking_cloud_client; diff --git a/framework/src/hosts/mod.rs b/framework/src/hosts/mod.rs new file mode 100644 index 0000000..15bc627 --- /dev/null +++ b/framework/src/hosts/mod.rs @@ -0,0 +1,31 @@ +//! Host types +//! # Examples +//! ## Connect to a single solr host +//! Good for when you have an external load balancer +//! ```rust +//! use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +//! use solrstice::hosts::solr_server_host::SolrSingleServerHost; +//! use solrstice::models::context::{SolrServerContextBuilder}; +//! +//! let host = SolrSingleServerHost::new("localhost:8983"); +//! let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); +//! ``` +//! ## Connect to zookeeper instances +//! ```no_run +//! use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +//! use solrstice::hosts::zookeeper_host::ZookeeperEnsembleHostConnector; +//! use solrstice::models::context::{SolrServerContextBuilder}; +//! +//! # async fn run() -> Result<(), Box<dyn std::error::Error>> { +//! let host = ZookeeperEnsembleHostConnector::new(&["localhost:8983", "localhost:8984"], std::time::Duration::from_secs(3)).connect().await?; +//! let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); +//! # Ok(()) +//! # } +//! ``` + +/// Solr host trait +pub mod solr_host; +/// Direct solr connectors +pub mod solr_server_host; +/// Zookeeper connector +pub mod zookeeper_host; diff --git a/framework/src/hosts/solr_host.rs b/framework/src/hosts/solr_host.rs new file mode 100644 index 0000000..de62c9b --- /dev/null +++ b/framework/src/hosts/solr_host.rs @@ -0,0 +1,11 @@ +use crate::models::error::SolrError; +use async_trait::async_trait; +use dyn_clone::DynClone; +use std::borrow::Cow; + +/// SolrHost specifies how to connect to a solr server.
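For readers who want to plug in their own host-resolution logic, here is a minimal sketch (not part of this patch) of a custom `SolrHost` implementation that reads the node address from an environment variable. The `EnvSolrHost` type and the `SOLR_URL` variable are hypothetical, and the `Cow<str>` return type is assumed from the trait shown just below (the generic parameters were lost in this rendering of the patch).

```rust
use async_trait::async_trait;
use solrstice::hosts::solr_host::SolrHost;
use solrstice::models::error::SolrError;
use std::borrow::Cow;

/// Hypothetical host that resolves the Solr node from an environment variable.
#[derive(Clone)]
struct EnvSolrHost;

#[async_trait]
impl SolrHost for EnvSolrHost {
    async fn get_solr_node(&self) -> Result<Cow<str>, SolrError> {
        // Resolve the node lazily, once per request
        std::env::var("SOLR_URL")
            .map(Cow::Owned)
            .map_err(|_| SolrError::Unknown("SOLR_URL is not set".to_string()))
    }
}
```

A host built this way should be usable wherever the built-in hosts are, e.g. `SolrServerContextBuilder::new(EnvSolrHost).build()`, assuming the builder accepts any `SolrHost` implementation as the examples in this patch suggest.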
+#[async_trait] +pub trait SolrHost: DynClone { + async fn get_solr_node(&self) -> Result, SolrError>; +} +dyn_clone::clone_trait_object!(SolrHost); diff --git a/framework/src/hosts/solr_server_host.rs b/framework/src/hosts/solr_server_host.rs new file mode 100644 index 0000000..2fb8948 --- /dev/null +++ b/framework/src/hosts/solr_server_host.rs @@ -0,0 +1,118 @@ +use crate::hosts::solr_host::SolrHost; +use crate::models::error::SolrError; +use async_trait::async_trait; +use std::borrow::Cow; +use std::time::Duration; + +/// Connect to a single solr host +/// Good for if you have an external load balancer +/// ```rust +/// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +/// use solrstice::hosts::solr_server_host::SolrSingleServerHost; +/// use solrstice::models::context::{SolrServerContextBuilder}; +/// +/// let host = SolrSingleServerHost::new("localhost:8983"); +/// let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); +/// ``` +#[derive(Clone)] +pub struct SolrSingleServerHost { + pub host: String, +} + +#[async_trait] +impl SolrHost for SolrSingleServerHost { + async fn get_solr_node(&self) -> Result, SolrError> { + Ok(Cow::Borrowed(&self.host)) + } +} + +impl SolrSingleServerHost { + /// Connect to a single solr host + /// Good for if you have an external load balancer + /// ```rust + /// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// use solrstice::models::context::{SolrServerContextBuilder}; + /// + /// let host = SolrSingleServerHost::new("localhost:8983"); + /// let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); + /// ``` + pub fn new(host: &str) -> SolrSingleServerHost { + SolrSingleServerHost { + host: host.to_string(), + } + } +} + +/// Connect to multiple solr hosts. Acts as a load balancer with random selection +/// +/// It would be better to use [ZookeeperEnsembleHostConnector](crate::hosts::zookeeper_host::ZookeeperEnsembleHostConnector) instead. 
+/// The timeout is used to determine how long to wait for a response from a solr host before trying the next one +/// ```rust +/// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +/// use solrstice::hosts::solr_server_host::{SolrMultipleServerHost}; +/// use solrstice::models::context::{SolrServerContextBuilder}; +/// +/// let host = SolrMultipleServerHost::new(&["localhost:8983", "localhost:8984"], std::time::Duration::from_secs(3)); +/// let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); +/// ``` +#[derive(Clone)] +pub struct SolrMultipleServerHost { + pub hosts: Vec, + pub timeout: Duration, +} + +#[async_trait] +impl SolrHost for SolrMultipleServerHost { + async fn get_solr_node(&self) -> Result, SolrError> { + let mut server_indices: Vec = (0..self.hosts.len()).collect(); + if server_indices.is_empty() { + return Err(SolrError::SolrConnectionError( + "No Solr Host Specified".to_string(), + )); + } + fastrand::shuffle(&mut server_indices); + for i in server_indices { + match self.hosts.get(i) { + None => continue, + Some(r) => { + //TODO There might be a better way to do this + let client = reqwest::Client::new(); + let res = client + .get(format!("{}/solr/", r)) + .timeout(self.timeout) + .send() + .await; + if res.is_err() { + continue; + } + return Ok(Cow::Borrowed(r)); + } + } + } + Err(SolrError::SolrConnectionError( + "No Solr Host answered".to_string(), + )) + } +} + +impl SolrMultipleServerHost { + /// Connect to multiple solr hosts. Acts as a load balancer with random selection + /// + /// It would be better to use [ZookeeperEnsembleHostConnector](crate::hosts::zookeeper_host::ZookeeperEnsembleHostConnector) instead. + /// The timeout is used to determine how long to wait for a response from a solr host before trying the next one + /// ```rust + /// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// use solrstice::hosts::solr_server_host::{SolrMultipleServerHost}; + /// use solrstice::models::context::{SolrServerContextBuilder}; + /// + /// let host = SolrMultipleServerHost::new(&["localhost:8983", "localhost:8984"], std::time::Duration::from_secs(3)); + /// let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); + /// ``` + pub fn new(hosts: &[&str], timeout: Duration) -> SolrMultipleServerHost { + SolrMultipleServerHost { + hosts: hosts.iter().map(|x| x.to_string()).collect(), + timeout, + } + } +} diff --git a/framework/src/hosts/zookeeper_host.rs b/framework/src/hosts/zookeeper_host.rs new file mode 100644 index 0000000..8f88a61 --- /dev/null +++ b/framework/src/hosts/zookeeper_host.rs @@ -0,0 +1,146 @@ +use crate::hosts::solr_host::SolrHost; +use crate::models::error::SolrError; +use async_trait::async_trait; +use log::info; +use std::borrow::{Borrow, Cow}; +use std::sync::Arc; +use std::time::Duration; +use zookeeper_async::{WatchedEvent, Watcher, ZkResult, ZooKeeper}; + +/// Connect to zookeeper instances to get a list of solr nodes to connect to. Select randomly from the list of live nodes. 
+/// The timeout is used to determine how long to wait for a response from a solr host before trying the next one +/// ```no_run +/// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +/// use solrstice::hosts::zookeeper_host::ZookeeperEnsembleHostConnector; +/// use solrstice::models::context::{SolrServerContextBuilder}; +/// +/// # async fn run() -> Result<(), Box> { +/// let host = ZookeeperEnsembleHostConnector::new(&["localhost:8983", "localhost:8984"], std::time::Duration::from_secs(3)).connect().await?; +/// let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); +/// # Ok(()) +/// # } +/// ``` +#[derive(Clone)] +pub struct ZookeeperEnsembleHostConnector { + pub hosts: Vec, + pub timeout: Duration, +} + +impl ZookeeperEnsembleHostConnector { + /// Connect to zookeeper instances to get a list of solr nodes to connect to. Select randomly from the list of live nodes. + /// The timeout is used to determine how long to wait for a response from a solr host before trying the next one + /// ```no_run + /// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// use solrstice::hosts::zookeeper_host::ZookeeperEnsembleHostConnector; + /// use solrstice::models::context::{SolrServerContextBuilder}; + /// + /// # async fn run() -> Result<(), Box> { + /// let host = ZookeeperEnsembleHostConnector::new(&["localhost:8983", "localhost:8984"], std::time::Duration::from_secs(3)).connect().await?; + /// let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); + /// # Ok(()) + /// # } + /// ``` + pub fn new(hosts: &[&str], timeout: Duration) -> ZookeeperEnsembleHostConnector { + ZookeeperEnsembleHostConnector { + hosts: hosts.iter().map(|x| x.to_string()).collect(), + timeout, + } + } + + /// Connect to zookeeper instances to get a list of solr nodes to connect to. Select randomly from the list of live nodes. + /// The timeout is used to determine how long to wait for a response from a solr host before trying the next one + /// ```no_run + /// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// use solrstice::hosts::zookeeper_host::ZookeeperEnsembleHostConnector; + /// use solrstice::models::context::{SolrServerContextBuilder}; + /// + /// # async fn run() -> Result<(), Box> { + /// let host = ZookeeperEnsembleHostConnector::new(&["localhost:8983", "localhost:8984"], std::time::Duration::from_secs(3)).connect().await?; + /// let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); + /// # Ok(()) + /// # } + /// ``` + pub async fn connect(&self) -> Result { + ZookeeperEnsembleHost::new(&self.hosts, self.timeout).await + } +} + +#[cfg(feature = "blocking")] +use crate::runtime::RUNTIME; +#[cfg(feature = "blocking")] +impl ZookeeperEnsembleHostConnector { + /// Connect to zookeeper instances to get a list of solr nodes to connect to. Select randomly from the list of live nodes. 
+ /// The timeout is used to determine how long to wait for a response from a solr host before trying the next one + /// ```no_run + /// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// use solrstice::clients::blocking_cloud_client::BlockingSolrCloudClient; + /// use solrstice::hosts::solr_server_host::{SolrMultipleServerHost}; + /// use solrstice::hosts::zookeeper_host::ZookeeperEnsembleHostConnector; + /// use solrstice::models::context::{SolrServerContextBuilder}; + /// # async fn run() -> Result<(), Box> { + /// let host = ZookeeperEnsembleHostConnector::new(&["localhost:8983", "localhost:8984"], std::time::Duration::from_secs(3)).connect_blocking()?; + /// let client = BlockingSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); + /// # Ok(()) + /// # } + /// ``` + pub fn connect_blocking(&self) -> Result { + RUNTIME.block_on(self.connect()) + } +} + +/// Connect to zookeeper instances to get a list of solr nodes to connect to. Select randomly from the list of live nodes. +/// The timeout is used to determine how long to wait for a response from a solr host before trying the next one +/// ```rust +/// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +/// use solrstice::hosts::zookeeper_host::ZookeeperEnsembleHostConnector; +/// use solrstice::models::context::{SolrServerContextBuilder}; +/// +/// # async fn run() -> Result<(), Box> { +/// let host = ZookeeperEnsembleHostConnector::new(&["localhost:8983", "localhost:8984"], std::time::Duration::from_secs(3)).connect().await?; +/// let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(host).build()); +/// # Ok(()) +/// # } +/// ``` +#[derive(Clone)] +pub struct ZookeeperEnsembleHost { + client: Arc, +} + +impl ZookeeperEnsembleHost { + pub(crate) async fn new>( + hosts: &[T], + timeout: Duration, + ) -> Result { + Ok(ZookeeperEnsembleHost { + client: Arc::new(ZooKeeper::connect(&hosts.join(","), timeout, LoggingWatcher).await?), + }) + } +} + +#[async_trait] +impl SolrHost for ZookeeperEnsembleHost { + async fn get_solr_node(&self) -> Result, SolrError> { + let hosts = get_hosts_from_zookeeper(&self.client).await?; + match hosts.get(fastrand::usize(0..hosts.len())) { + None => Err(SolrError::SolrConnectionError( + "No ready Solr nodes from Zookeeper".to_string(), + )), + //TODO Investigate this further. Is it always http://, and do people use auth? + Some(r) => Ok(Cow::Owned(format!( + "http://{}", + r.strip_suffix("_solr").unwrap_or(r) + ))), + } + } +} + +pub struct LoggingWatcher; +impl Watcher for LoggingWatcher { + fn handle(&self, e: WatchedEvent) { + info!("{:?}", e) + } +} + +pub async fn get_hosts_from_zookeeper(client: &ZooKeeper) -> ZkResult> { + client.get_children("/live_nodes", true).await +} diff --git a/framework/src/lib.rs b/framework/src/lib.rs new file mode 100644 index 0000000..1fef2b8 --- /dev/null +++ b/framework/src/lib.rs @@ -0,0 +1,82 @@ +//! Solrstice is a Solr 8+ client for Rust. +//! Take a look at [AsyncSolrCloudClient](crate::clients::async_cloud_client::AsyncSolrCloudClient) and [SelectQueryBuilder](crate::queries::select::SelectQueryBuilder) for more documentation +//! # Examples +//! ```no_run +//! use serde::{Deserialize, Serialize}; +//! use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +//! use solrstice::hosts::solr_server_host::SolrSingleServerHost; +//! use solrstice::models::auth::SolrBasicAuth; +//! use solrstice::models::context::SolrServerContext; +//! use solrstice::models::error::SolrError; +//! 
use solrstice::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; +//! use solrstice::queries::select::SelectQueryBuilder; +//! use std::path::Path; +//! +//! #[derive(Serialize, Deserialize, Debug)] +//! struct TestData { +//! id: String, +//! } +//! +//! #[tokio::test] +//! pub async fn example() -> Result<(), SolrError> { +//! +//! //Create a solr client. You can also use a list of zookeeper hosts instead of a single server. +//! let context = SolrServerContext::new(SolrSingleServerHost::new("http://localhost:8983")) +//! .with_auth(SolrBasicAuth::new("solr", Some("SolrRocks"))); +//! let client = AsyncSolrCloudClient::new(context); +//! +//! // Upload config +//! client +//! .upload_config("example_config", Path::new("/path/to/config")) +//! .await?; +//! +//! // Create collection +//! client +//! .create_collection("example_collection", "example_config", 1, 1) +//! .await?; +//! +//! // Index document +//! let docs = vec![TestData { +//! id: "example_document".to_string(), +//! }]; +//! client +//! .index( +//! &UpdateQueryBuilder::new(), +//! "example_collection", +//! docs.as_slice(), +//! ) +//! .await?; +//! +//! // Search and retrieve the document +//! let docs = client +//! .select( +//! &SelectQueryBuilder::new().fq(&["id:example_document"]), +//! "example_collection", +//! ) +//! .await? +//! .get_response() +//! .ok_or("No response provided")? +//! .get_docs::()?; +//! +//! // Delete the document +//! client +//! .delete( +//! &DeleteQueryBuilder::new().ids(&["example_document"]), +//! "example_collection", +//! ) +//! .await?; +//! Ok(()) +//! } +//! ``` + +/// Solr Clients +pub mod clients; +/// Host types +pub mod hosts; +/// Model structs +pub mod models; +/// Query types +pub mod queries; +#[cfg(feature = "blocking")] +/// Tokio Runtime for blocking usage +pub mod runtime; diff --git a/framework/src/models/auth.rs b/framework/src/models/auth.rs new file mode 100644 index 0000000..0e5c9f7 --- /dev/null +++ b/framework/src/models/auth.rs @@ -0,0 +1,37 @@ +use dyn_clone::DynClone; +use reqwest::RequestBuilder; + +/// Modifies a reqwest::RequestBuilder to add authentication +pub trait SolrAuth: DynClone { + fn add_auth_to_request(&self, request: RequestBuilder) -> RequestBuilder; +} +dyn_clone::clone_trait_object!(SolrAuth); + +/// Basic Authentication +/// # Examples +/// ``` +/// use solrstice::models::auth::SolrBasicAuth; +/// let auth = SolrBasicAuth::new("solr", Some("SolrRocks")); +#[derive(Clone)] +pub struct SolrBasicAuth { + pub username: String, + pub password: Option, +} + +impl SolrAuth for SolrBasicAuth { + fn add_auth_to_request(&self, request: RequestBuilder) -> RequestBuilder { + request.basic_auth(&self.username, self.password.as_ref()) + } +} + +impl SolrBasicAuth { + /// Create a new Basic Authentication + /// use solrstice::models::auth::SolrBasicAuth; + /// let auth = SolrBasicAuth::new("solr", Some("SolrRocks")); + pub fn new(username: &str, password: Option<&str>) -> SolrBasicAuth { + SolrBasicAuth { + username: username.to_string(), + password: password.map(|x| x.to_string()), + } + } +} diff --git a/framework/src/models/commit_type.rs b/framework/src/models/commit_type.rs new file mode 100644 index 0000000..b1ad104 --- /dev/null +++ b/framework/src/models/commit_type.rs @@ -0,0 +1,23 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Copy, Clone, Serialize, Deserialize, PartialEq, Debug)] +/// This struct encapsulates the commit types for Solr's update and delete queries. 
+/// By default, a `Hard` commit is performed, equating to `commit=true`. +/// Conversely, a `Soft` commit corresponds to `softCommit=true`. +/// # Examples +/// ``` +/// use solrstice::models::commit_type::CommitType; +/// use solrstice::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; +/// +/// let update_query = UpdateQueryBuilder::new().commit_type(CommitType::Soft); +/// let delete_query = DeleteQueryBuilder::new().commit_type(CommitType::Soft); +pub enum CommitType { + Hard, + Soft, +} + +impl Default for CommitType { + fn default() -> Self { + Self::Hard + } +} diff --git a/framework/src/models/context.rs b/framework/src/models/context.rs new file mode 100644 index 0000000..39f8dfb --- /dev/null +++ b/framework/src/models/context.rs @@ -0,0 +1,112 @@ +use crate::hosts::solr_host::SolrHost; +use crate::models::auth::SolrAuth; +use std::sync::Arc; + +/// A SolrServerContext specifies how to connect to a solr server, and how to authenticate. +/// # Examples +/// ``` +/// use solrstice::models::context::SolrServerContextBuilder; +/// use solrstice::hosts::solr_host::SolrHost; +/// use solrstice::hosts::solr_server_host::SolrSingleServerHost; +/// use solrstice::models::auth::SolrBasicAuth; +/// +/// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")) +/// .with_auth(SolrBasicAuth::new("solr", Some("SolrRocks"))) +/// .build(); +/// ``` +#[derive(Clone)] +pub struct SolrServerContextBuilder { + pub(crate) host: Arc, + pub(crate) auth: Option>, + pub(crate) client: Option, +} + +impl SolrServerContextBuilder { + /// Create a new SolrServerContextBuilder + /// # Examples + /// ```no_run + /// use solrstice::models::context::SolrServerContextBuilder; + /// use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// ``` + pub fn new(host: A) -> Self { + Self { + host: Arc::new(host), + auth: None, + client: None, + } + } + + /// Create a new SolrServerContextBuilder + /// # Examples + /// ```no_run + /// use solrstice::models::context::SolrServerContextBuilder; + /// use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// use solrstice::models::auth::SolrBasicAuth; + /// + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")) + /// .with_auth(SolrBasicAuth::new("username", Some("password"))).build(); + /// ``` + pub fn with_auth(mut self, auth: impl SolrAuth + Send + Sync + 'static) -> Self { + self.auth = Some(Arc::new(auth)); + self + } + + /// Use a custom reqwest client + /// # Examples + /// ``` + /// use std::time::Duration; + /// use solrstice::models::context::SolrServerContextBuilder; + /// use reqwest::Client; + /// use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// + /// let client = Client::builder().timeout(Duration::from_secs(10)).build().unwrap(); + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).with_client(client).build(); + pub fn with_client(mut self, client: reqwest::Client) -> Self { + self.client = Some(client); + self + } + + /// Build a SolrServerContext + /// # Examples + /// ```no_run + /// use solrstice::models::context::SolrServerContextBuilder; + /// use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// ``` + pub fn build(self) -> 
SolrServerContext { + self.into() + } +} + +/// A SolrServerContext specifies how to connect to a solr server, and how to authenticate. +/// # Examples +/// ``` +/// use solrstice::models::context::SolrServerContextBuilder; +/// use solrstice::hosts::solr_host::SolrHost; +/// use solrstice::hosts::solr_server_host::SolrSingleServerHost; +/// use solrstice::models::auth::SolrBasicAuth; +/// +/// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")) +/// .with_auth(SolrBasicAuth::new("solr", Some("SolrRocks"))) +/// .build(); +/// ``` +/// The SolrServerContext is used to create a SolrClient +/// +/// Take a look at [SolrServerContextBuilder](crate::models::context::SolrServerContextBuilder) for more information +#[derive(Clone)] +pub struct SolrServerContext { + pub(crate) host: Arc, + pub(crate) auth: Option>, + pub(crate) client: reqwest::Client, +} + +impl From for SolrServerContext { + fn from(builder: SolrServerContextBuilder) -> Self { + Self { + host: builder.host, + auth: builder.auth, + client: builder.client.unwrap_or_else(reqwest::Client::new), + } + } +} diff --git a/framework/src/models/error.rs b/framework/src/models/error.rs new file mode 100644 index 0000000..e551ffc --- /dev/null +++ b/framework/src/models/error.rs @@ -0,0 +1,54 @@ +use crate::models::response::SolrResponse; +use thiserror::Error; + +/// Main error type for Solrstice +#[derive(Error, Debug)] +pub enum SolrError { + #[error("HTTP Request failed: {}", .0)] + ReqwestError(#[from] reqwest::Error), + #[error("IO Error: {}", .0)] + IOError(#[from] std::io::Error), + #[error("Zip Error: {}", .0)] + ZipError(#[from] zip::result::ZipError), + + #[error("Serde failed: {}", .0)] + SerdeJsonError(#[from] serde_json::Error), + #[error("Error from Solr {code:?}: {msg:?}")] + SolrResponseError { code: usize, msg: String }, + #[error("Zookeeper error: {}", .0)] + ZkError(#[from] zookeeper_async::ZkError), + + #[error("Strip prefix error: {}", .0)] + StripPrefixError(#[from] std::path::StripPrefixError), + + #[error("Solr Connection error: {0}")] + SolrConnectionError(String), + + #[error("Unknown error: {0}")] + Unknown(String), +} + +impl From<&str> for SolrError { + fn from(err: &str) -> Self { + SolrError::Unknown(err.to_string()) + } +} + +/// Helper function to check if a SolrResponse contains an error +pub fn try_solr_error(response: &SolrResponse) -> Result<(), SolrError> { + match &response.error { + None => Ok(()), + Some(err) => { + let mut msg = "Unknown Solr Error".to_string(); + if err.msg.is_some() { + msg = err.msg.clone().unwrap(); + } else if err.trace.is_some() { + msg = err.trace.clone().unwrap(); + } + Err(SolrError::SolrResponseError { + code: err.code, + msg, + }) + } + } +} diff --git a/framework/src/models/group.rs b/framework/src/models/group.rs new file mode 100644 index 0000000..c7a946d --- /dev/null +++ b/framework/src/models/group.rs @@ -0,0 +1,199 @@ +use crate::models::error::SolrError; +use crate::models::response::SolrDocsResponse; +use serde::de::DeserializeOwned; +use serde::{Deserialize, Serialize}; +use serde_json::value::RawValue; + +/// Struct representing a Solr Grouping response +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct SolrGroupResult { + pub matches: usize, + #[serde(rename = "ngroups")] + pub n_groups: Option, + pub groups: Option>, + #[serde(rename = "doclist")] + pub doc_list: Option, +} + +impl SolrGroupResult { + /// Returns a field query result + /// # Examples + /// ```no_run + /// # use 
solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// use solrstice::models::auth::SolrBasicAuth; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// # let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let response = SelectQueryBuilder::new() + /// .fq(&["age:[* TO *]"]) + /// .grouping(&GroupingComponentBuilder::new().fields(&["age"]).limit(10)) + /// .execute(&context, "collection_name") + /// .await?; + /// let groups = response.get_groups().ok_or("No groups")?; + /// let age_group = groups.get("age").ok_or("No age group")?; + /// + /// for group in age_group.get_field_result().ok_or("No field result")? { + /// println!("Group key: {}", group.get_group_value::()?); + /// let docs = group.get_doc_list().get_docs::()?; + /// } + /// # Ok(()) + /// # } + /// ``` + pub fn get_field_result(&self) -> Option<&Vec> { + self.groups.as_ref() + } + + /// Returns a grouping query result + /// # Examples + /// ```no_run + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// use solrstice::models::auth::SolrBasicAuth; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// # let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let response = SelectQueryBuilder::new() + /// .grouping( + /// &GroupingComponentBuilder::new() + /// .queries(&["age:[0 TO 59]", "age:[60 TO *]"]) + /// .limit(10), + /// ) + /// .execute(&context, "collection_name") + /// .await?; + /// + /// let groups = response + /// .get_groups().ok_or("No groups")?; + /// let result = groups + /// .get("age:[0 TO 59]").ok_or("No age group")? + /// .get_query_result().ok_or("No query result")?; + /// # Ok(()) + /// # } + /// ``` + pub fn get_query_result(&self) -> Option<&SolrDocsResponse> { + self.doc_list.as_ref() + } + + /// If [GroupFormatting::Simple](crate::queries::components::grouping::GroupFormatting::Simple) is used, returns a simple grouping query result. 
This uses the same logic as [get_query_result](SolrGroupResult::get_query_result) + /// # Examples + /// ```no_run + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// use solrstice::models::auth::SolrBasicAuth; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// use solrstice::queries::components::grouping::{GroupFormatting, GroupingComponentBuilder}; + /// use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// # let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let response = SelectQueryBuilder::new() + /// .fq(&["age:[* TO *]"]) + /// .grouping(&GroupingComponentBuilder::new().fields(&["age"]).limit(10).format(GroupFormatting::Simple)) + /// .execute(&context, "collection_name") + /// .await?; + /// let groups = response.get_groups().ok_or("No groups")?; + /// let age_group = groups.get("age").ok_or("No age group")?; + /// + /// let result = age_group.get_simple_result().ok_or("No field result")?; + /// # Ok(()) + /// # } + pub fn get_simple_result(&self) -> Option<&SolrDocsResponse> { + self.doc_list.as_ref() + } +} + +/// Struct representing a Solr Grouping field response +/// +/// group_value can be multiple types (int, string), so it is not immediately deserialized +/// # Examples +/// ```no_run +/// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +/// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; +/// # use solrstice::models::context::SolrServerContextBuilder; +/// # use solrstice::queries::components::grouping::GroupingComponentBuilder; +/// # use solrstice::queries::select::SelectQueryBuilder; +/// # async fn run() -> Result<(), Box> { +/// # let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); +/// # let client = AsyncSolrCloudClient::new(context); +/// let response = client.select(&SelectQueryBuilder::new() +/// .fq(&["age:[* TO *]"]) +/// .grouping(&GroupingComponentBuilder::new().fields(&["age"]).limit(10)), "collection_name").await?; +/// let groups = response +/// .get_groups() +/// .ok_or("No groups found")?; +/// let age_group = groups.get("age").ok_or("No age group")?; +/// for group in age_group.get_field_result().ok_or("No field result")? 
{ +/// println!("Group key: {}", group.get_group_value::()?); +/// let docs = group.get_doc_list().get_docs::()?; +/// } +/// # Ok(()) +/// # } +/// ``` +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct SolrGroupFieldResult { + /// The key of the field result + #[serde(rename = "groupValue")] + pub group_value: Box, + /// A list of documents + #[serde(rename = "doclist")] + pub doc_list: SolrDocsResponse, +} + +impl SolrGroupFieldResult { + /// Returns the group key + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// # use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// # let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// # let client = AsyncSolrCloudClient::new(context); + /// let response = client.select(&SelectQueryBuilder::new() + /// .fq(&["age:[* TO *]"]) + /// .grouping(&GroupingComponentBuilder::new().fields(&["age"]).limit(10)), "collection_name").await?; + /// let groups = response + /// .get_groups() + /// .ok_or("No groups found")?; + /// let age_group = groups.get("age").ok_or("No age group")?; + /// for group in age_group.get_field_result().ok_or("No field result")? { + /// println!("Group key: {}", group.get_group_value::()?); + /// } + /// # Ok(()) + /// # } + /// ``` + pub fn get_group_value(&self) -> Result { + serde_json::from_str(self.group_value.get()).map_err(SolrError::from) + } + + /// Returns a list of documents corresponding to the group + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// # use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// # let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// # let client = AsyncSolrCloudClient::new(context); + /// let response = client.select(&SelectQueryBuilder::new() + /// .fq(&["age:[* TO *]"]) + /// .grouping(&GroupingComponentBuilder::new().fields(&["age"]).limit(10)), "collection_name").await?; + /// let groups = response + /// .get_groups() + /// .ok_or("No groups found")?; + /// let age_group = groups.get("age").ok_or("No age group")?; + /// for group in age_group.get_field_result().ok_or("No field result")? { + /// println!("Group key: {}", group.get_group_value::()?); + /// let docs = group.get_doc_list().get_docs::()?; + /// } + /// # Ok(()) + /// # } + /// ``` + pub fn get_doc_list(&self) -> &SolrDocsResponse { + &self.doc_list + } +} diff --git a/framework/src/models/mod.rs b/framework/src/models/mod.rs new file mode 100644 index 0000000..4395d7d --- /dev/null +++ b/framework/src/models/mod.rs @@ -0,0 +1,14 @@ +//! Models used by the Solr Client. + +/// All authentication types supported by the library. +pub mod auth; +/// Commit types for Solr's update and delete queries. +pub mod commit_type; +/// Context for the solr Client. Specifying how to connect. +pub mod context; +/// Error types for the library. 
+pub mod error; +/// Models used by the GroupingComponent. +pub mod group; +/// Models used to get responses from Solr +pub mod response; diff --git a/framework/src/models/response.rs b/framework/src/models/response.rs new file mode 100644 index 0000000..bab6ff8 --- /dev/null +++ b/framework/src/models/response.rs @@ -0,0 +1,171 @@ +use crate::models::error::SolrError; +use crate::models::group::SolrGroupResult; +use serde::de::DeserializeOwned; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_json::value::RawValue; +use std::collections::HashMap; + +/// Response header given by solr, if not `responseHeader=false` is passed. +#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)] +pub struct SolrResponseHeader { + #[serde(rename = "zkConnected")] + /// Whether or not the request was made to a Zookeeper managed Solr instance. + pub zk_connected: Option, + /// The status of the request. 0 if successful. + pub status: usize, + #[serde(rename = "QTime")] + /// The time in milliseconds that the request took to process. + pub q_time: usize, +} + +/// If the request was not successful, this will be populated. +#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)] +pub struct SolrResponseError { + /// The message of the error. + pub msg: Option, + /// The trace of the error. + pub trace: Option, + /// The code of the error. + pub code: usize, +} + +/// Documentation response from Solr. The docs are not immediately deserialized to allow for reading the other fields first. +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct SolrDocsResponse { + /// The number of documents found. + #[serde(rename = "numFound")] + pub num_found: usize, + /// The start index of the documents. + pub start: usize, + #[serde(rename = "numFoundExact")] + /// Whether or not the number of documents found is exact. + pub num_found_exact: bool, + /// The documents returned by the query. Use [`SolrDocsResponse::get_docs`] to deserialize. + docs: Box, +} + +/// Represents any response Solr can give. This is the top level response. +#[derive(Serialize, Deserialize, Clone, Debug)] +pub struct SolrResponse { + /// The response header given by Solr if not `responseHeader=false` is passed. + #[serde(rename = "responseHeader")] + pub(crate) response_header: Option, + /// The error given by Solr if the request was not successful. + pub(crate) error: Option, + /// Aliases given by solr from [AsyncSolrCloudClient::get_aliases](crate::clients::async_cloud_client::AsyncSolrCloudClient::get_aliases). + #[serde(default)] + #[serde(deserialize_with = "from_alias")] + pub(crate) aliases: Option>>, + /// The facets given by Solr if `facet=true` is passed. + pub(crate) facets: Option>, + /// The response given by Solr on a select request + pub(crate) response: Option, + /// The config sets that exist on the server. + /// + /// Returned if using [AsyncSolrCloudClient::get_configs](crate::clients::async_cloud_client::AsyncSolrCloudClient::get_configs). + #[serde(rename = "configSets")] + pub(crate) config_sets: Option>, + /// The collections that exist on the server. + /// + /// Returned if using [AsyncSolrCloudClient::get_collections](crate::clients::async_cloud_client::AsyncSolrCloudClient::get_collections). + pub(crate) collections: Option>, + /// Grouping results returned by Solr if `group=true` is passed. + pub(crate) grouped: Option>, + /// The next cursor mark returned by Solr if [SelectQueryBuilder::cursor_mark](crate::queries::select::SelectQueryBuilder::cursor_mark) is passed. 
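As a hedged illustration of how `next_cursor_mark` might be consumed, the sketch below pages through a collection with cursor marks. It assumes `SelectQueryBuilder::cursor_mark` takes the mark as a string slice (the doc link above only confirms the method exists), it omits the stable sort Solr requires for cursor paging, and the collection name is hypothetical.

```rust
use solrstice::clients::async_cloud_client::AsyncSolrCloudClient;
use solrstice::models::error::SolrError;
use solrstice::queries::select::SelectQueryBuilder;

async fn page_through(client: &AsyncSolrCloudClient) -> Result<(), SolrError> {
    // "*" is the conventional starting cursor mark in Solr
    let mut cursor = "*".to_string();
    loop {
        let response = client
            .select(
                // Assumed signature: cursor_mark(&str); the required stable sort is omitted
                &SelectQueryBuilder::new().cursor_mark(&cursor),
                "collection_name",
            )
            .await?;
        // ... consume response.get_response() for this page here ...
        match response.next_cursor_mark {
            // Solr signals the last page by echoing the same mark back
            Some(next) if next != cursor => cursor = next,
            _ => break,
        }
    }
    Ok(())
}
```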
+ #[serde(rename = "nextCursorMark")] + pub next_cursor_mark: Option, +} + +impl SolrResponse { + /// Get the docs returned by a select request. + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::auth::SolrBasicAuth; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// # use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).with_auth(SolrBasicAuth::new("solr", Some("SolrRocks"))).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let response = client.select(&SelectQueryBuilder::new(), "collection").await?; + /// Ok(()) + /// # } + /// ``` + pub fn get_response(&self) -> Option<&SolrDocsResponse> { + self.response.as_ref() + } + + /// Get the groups returned by a select request using the [GroupingComponentBuilder](crate::queries::components::grouping::GroupingComponentBuilder). + /// + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::auth::SolrBasicAuth; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// # use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).with_auth(SolrBasicAuth::new("solr", Some("SolrRocks"))).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let groups = client.select(&SelectQueryBuilder::new() + /// .grouping( + /// &GroupingComponentBuilder::new() + /// .queries(&["age:[0 TO 59]", "age:[60 TO *]"]) + /// .limit(10), + /// ), "collection").await? + /// .get_groups().ok_or("No groups returned")?; + /// Ok(()) + /// # } + /// ``` + pub fn get_groups(&self) -> Option<&HashMap> { + self.grouped.as_ref() + } +} + +impl SolrDocsResponse { + /// Deserialize the docs returned by a select request. 
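A small sketch (not part of this patch) of `get_docs` with an explicit document type: the `City` struct and collection name are hypothetical, and the error conversion leans on the `From<&str>` impl for `SolrError` defined earlier in this patch.

```rust
use serde::Deserialize;
use solrstice::clients::async_cloud_client::AsyncSolrCloudClient;
use solrstice::models::error::SolrError;
use solrstice::queries::select::SelectQueryBuilder;

/// Hypothetical document shape for the example collection.
#[derive(Deserialize)]
struct City {
    id: String,
}

async fn first_page(client: &AsyncSolrCloudClient) -> Result<Vec<City>, SolrError> {
    let response = client
        .select(&SelectQueryBuilder::new().fq(&["id:[* TO *]"]), "collection_name")
        .await?;
    // ok_or with a &str works because SolrError implements From<&str>
    let docs = response.get_response().ok_or("No response block returned")?;
    // num_found and start describe the whole result set; get_docs deserializes this page
    let _total = docs.num_found;
    docs.get_docs::<City>()
}
```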
+ /// + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::auth::SolrBasicAuth; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// # use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).with_auth(SolrBasicAuth::new("solr", Some("SolrRocks"))).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let response = client.select(&SelectQueryBuilder::new(), "collection").await?; + /// let docs = response.get_response().unwrap().get_docs::()?; + /// Ok(()) + /// # } + /// ``` + pub fn get_docs(&self) -> Result, SolrError> { + serde_json::from_str::>(self.docs.get()).map_err(|e| e.into()) + } +} + +fn from_alias<'de, D>(deserializer: D) -> Result>>, D::Error> +where + D: Deserializer<'de>, +{ + let value_map: Option> = Deserialize::deserialize(deserializer)?; + match value_map { + None => Ok(None), + Some(value_map) => { + let mut return_map: HashMap> = HashMap::new(); + for (key, values) in value_map { + if values.len() > 0 { + return_map.insert(key, values.split(",").map(|x| x.to_string()).collect()); + } else { + return_map.insert(key, vec![]); + } + } + Ok(Some(return_map)) + } + } +} diff --git a/framework/src/queries/alias.rs b/framework/src/queries/alias.rs new file mode 100644 index 0000000..a5c66f5 --- /dev/null +++ b/framework/src/queries/alias.rs @@ -0,0 +1,106 @@ +use crate::models::context::SolrServerContext; +use crate::models::error::SolrError; +use crate::queries::helpers::basic_solr_request; +use std::collections::HashMap; + +/// Get aliases from the Solr server. +/// +/// This is not meant to be used directly, but rather as part of a client. +/// Example usage can be found at [AsyncSolrCloudClient::get_aliases](crate::clients::async_cloud_client::AsyncSolrCloudClient::get_aliases) +pub async fn get_aliases( + context: &SolrServerContext, +) -> Result>, SolrError> { + let query_params = [("action", "LISTALIASES"), ("wt", "json")]; + let json = + basic_solr_request(context, "/solr/admin/collections", query_params.as_ref()).await?; + match json.aliases { + None => Err(SolrError::Unknown( + "Could not find alias key in map".to_string(), + )), + Some(aliases) => Ok(aliases), + } +} + +/// Create an alias with the given name pointing to a list of collections. +/// +/// This is not meant to be used directly, but rather as part of a client. +/// Example usage can be found at [AsyncSolrCloudClient::create_alias](crate::clients::async_cloud_client::AsyncSolrCloudClient::create_alias) +pub async fn create_alias( + context: &SolrServerContext, + name: &str, + collections: &[&str], +) -> Result<(), SolrError> { + let collections = collections.join(","); + let query_params = [ + ("action", "CREATEALIAS"), + ("name", name), + ("collections", collections.as_str()), + ]; + basic_solr_request(context, "/solr/admin/collections", query_params.as_ref()).await?; + Ok(()) +} + +/// Check if an alias with the given name exists. +/// +/// This is not meant to be used directly, but rather as part of a client. 
+/// Example usage can be found at [AsyncSolrCloudClient::alias_exists](crate::clients::async_cloud_client::AsyncSolrCloudClient::alias_exists) +pub async fn alias_exists(context: &SolrServerContext, name: &str) -> Result { + let aliases = get_aliases(context).await?; + Ok(aliases.contains_key(&name.to_string())) +} + +/// Delete an alias with the given name. +/// +/// This is not meant to be used directly, but rather as part of a client. +/// Example usage can be found at [AsyncSolrCloudClient::delete_alias](crate::clients::async_cloud_client::AsyncSolrCloudClient::delete_alias) +pub async fn delete_alias(context: &SolrServerContext, name: &str) -> Result<(), SolrError> { + let query_params = [("action", "DELETEALIAS"), ("name", name), ("wt", "json")]; + basic_solr_request(context, "/solr/admin/collections", query_params.as_ref()).await?; + Ok(()) +} + +#[cfg(feature = "blocking")] +use crate::runtime::RUNTIME; +#[cfg(feature = "blocking")] +/// Get aliases from the Solr server. +/// +/// This is not meant to be used directly, but rather as part of a client. +/// Example usage can be found at [BlockingSolrCloudClient::get_aliases](crate::clients::blocking_cloud_client::BlockingSolrCloudClient::get_aliases) +pub fn get_aliases_blocking( + context: &SolrServerContext, +) -> Result>, SolrError> { + RUNTIME.handle().block_on(get_aliases(context)) +} + +#[cfg(feature = "blocking")] +/// Create an alias with the given name pointing to a list of collections. +/// +/// This is not meant to be used directly, but rather as part of a client. +/// Example usage can be found at [BlockingSolrCloudClient::create_alias](crate::clients::blocking_cloud_client::BlockingSolrCloudClient::create_alias) +pub fn create_alias_blocking( + context: &SolrServerContext, + name: &str, + collections: &[&str], +) -> Result<(), SolrError> { + RUNTIME + .handle() + .block_on(create_alias(context, name, collections)) +} + +#[cfg(feature = "blocking")] +/// Check if an alias with the given name exists. +/// +/// This is not meant to be used directly, but rather as part of a client. +/// Example usage can be found at [BlockingSolrCloudClient::alias_exists](crate::clients::blocking_cloud_client::BlockingSolrCloudClient::alias_exists) +pub fn alias_exists_blocking(context: &SolrServerContext, name: &str) -> Result { + RUNTIME.handle().block_on(alias_exists(context, name)) +} + +#[cfg(feature = "blocking")] +/// Delete an alias with the given name. +/// +/// This is not meant to be used directly, but rather as part of a client. 
+/// Example usage can be found at [BlockingSolrCloudClient::delete_alias](crate::clients::blocking_cloud_client::BlockingSolrCloudClient::delete_alias) +pub fn delete_alias_blocking(context: &SolrServerContext, name: &str) -> Result<(), SolrError> { + RUNTIME.handle().block_on(delete_alias(context, name)) +} diff --git a/framework/src/queries/collection.rs b/framework/src/queries/collection.rs new file mode 100644 index 0000000..541045c --- /dev/null +++ b/framework/src/queries/collection.rs @@ -0,0 +1,92 @@ +use crate::models::context::SolrServerContext; +use crate::models::error::SolrError; +use crate::queries::helpers::basic_solr_request; + +pub async fn create_collection( + builder: &SolrServerContext, + name: &str, + config: &str, + shards: usize, + replication_factor: usize, +) -> Result<(), SolrError> { + let query_params = [ + ("action", "CREATE"), + ("wt", "json"), + ("name", name), + ("numShards", &shards.to_string()), + ("replicationFactor", &replication_factor.to_string()), + ("collection.configName", config), + ]; + basic_solr_request(builder, "/solr/admin/collections", query_params.as_ref()).await?; + Ok(()) +} + +pub async fn get_collections(builder: &SolrServerContext) -> Result, SolrError> { + let query_params = [("action", "LIST"), ("wt", "json")]; + let json = basic_solr_request( + &builder, + &format!("/solr/admin/collections"), + query_params.as_ref(), + ) + .await?; + match json.collections { + None => Err(SolrError::Unknown("Could not get collections".to_string())), + Some(collections) => Ok(collections), + } +} + +pub async fn collection_exists(builder: &SolrServerContext, name: &str) -> Result { + let collections = get_collections(builder).await?; + Ok(collections.contains(&name.to_string())) +} + +pub async fn delete_collection(builder: &SolrServerContext, name: &str) -> Result<(), SolrError> { + let query_params = [("action", "DELETE"), ("name", name)]; + basic_solr_request( + builder, + &format!("/solr/admin/collections"), + query_params.as_ref(), + ) + .await?; + Ok(()) +} + +#[cfg(feature = "blocking")] +use crate::runtime::RUNTIME; +#[cfg(feature = "blocking")] +pub fn create_collection_blocking( + builder: &SolrServerContext, + name: &str, + config: &str, + shards: usize, + replication_factor: usize, +) -> Result<(), SolrError> { + RUNTIME.handle().block_on(create_collection( + builder, + name, + config, + shards, + replication_factor, + )) +} + +#[cfg(feature = "blocking")] +pub fn get_collections_blocking(builder: &SolrServerContext) -> Result, SolrError> { + RUNTIME.handle().block_on(get_collections(builder)) +} + +#[cfg(feature = "blocking")] +pub fn collection_exists_blocking( + builder: &SolrServerContext, + name: &str, +) -> Result { + RUNTIME.handle().block_on(collection_exists(builder, name)) +} + +#[cfg(feature = "blocking")] +pub fn delete_collection_blocking( + builder: &SolrServerContext, + name: &str, +) -> Result<(), SolrError> { + RUNTIME.handle().block_on(delete_collection(builder, name)) +} diff --git a/framework/src/queries/components/grouping.rs b/framework/src/queries/components/grouping.rs new file mode 100644 index 0000000..e2ffdd8 --- /dev/null +++ b/framework/src/queries/components/grouping.rs @@ -0,0 +1,211 @@ +use serde::{Deserialize, Serialize}; +use std::fmt; + +/// How to format groups. The default is GroupFormatting::Grouped. 
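To make the difference between the two formats concrete, here is a hedged sketch contrasting `Grouped` (read per group via `get_field_result`) with `Simple` (read as one flat doclist via `get_simple_result`); field and collection names are hypothetical.

```rust
use solrstice::clients::async_cloud_client::AsyncSolrCloudClient;
use solrstice::queries::components::grouping::{GroupFormatting, GroupingComponentBuilder};
use solrstice::queries::select::SelectQueryBuilder;

async fn formats(client: &AsyncSolrCloudClient) -> Result<(), Box<dyn std::error::Error>> {
    // Default (Grouped): one entry per group value, read via get_field_result()
    let grouped = client
        .select(
            &SelectQueryBuilder::new()
                .grouping(&GroupingComponentBuilder::new().fields(&["age"]).limit(10)),
            "collection_name",
        )
        .await?;
    let _per_group = grouped
        .get_groups()
        .and_then(|g| g.get("age"))
        .and_then(|g| g.get_field_result());

    // Simple: one flat doclist per grouped field, read via get_simple_result()
    let simple = client
        .select(
            &SelectQueryBuilder::new().grouping(
                &GroupingComponentBuilder::new()
                    .fields(&["age"])
                    .limit(10)
                    .format(GroupFormatting::Simple),
            ),
            "collection_name",
        )
        .await?;
    let _flat = simple
        .get_groups()
        .and_then(|g| g.get("age"))
        .and_then(|g| g.get_simple_result());
    Ok(())
}
```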
+#[derive(Debug, Deserialize, Serialize, Clone, Copy, PartialEq)] +pub enum GroupFormatting { + #[serde(rename = "grouped")] + Grouped, + #[serde(rename = "simple")] + Simple, +} + +impl fmt::Display for GroupFormatting { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", format!("{:?}", self).to_lowercase()) + } +} + +/// Group documents by a field or query. +/// # Examples +/// ```no_run +/// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; +/// use solrstice::models::auth::SolrBasicAuth; +/// # use solrstice::models::context::SolrServerContextBuilder; +/// use solrstice::queries::components::grouping::GroupingComponentBuilder; +/// use solrstice::queries::select::SelectQueryBuilder; +/// # async fn run() -> Result<(), Box<dyn std::error::Error>> { +/// # let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); +/// let response = SelectQueryBuilder::new() +/// .fq(&["age:[* TO *]"]) +/// .grouping(&GroupingComponentBuilder::new().fields(&["age"]).limit(10)) +/// .execute(&context, "collection_name") +/// .await?; +/// let groups = response.get_groups().ok_or("No groups")?; +/// let age_group = groups.get("age").ok_or("No age group")?; +/// +/// for group in age_group.get_field_result().ok_or("No field result")? { +/// println!("Group key: {}", group.get_group_value::<usize>()?); +/// let docs = group.get_doc_list().get_docs::<serde_json::Value>()?; +/// } +/// # Ok(()) +/// # } +/// ``` +#[derive(Deserialize, Serialize, Clone, Debug, Default, PartialEq)] +pub struct GroupingComponentBuilder { + #[serde(skip_serializing_if = "Option::is_none")] + group: Option<bool>, + #[serde(rename = "group.field", skip_serializing_if = "Option::is_none")] + pub field: Option<Vec<String>>, + #[serde(rename = "group.query", skip_serializing_if = "Option::is_none")] + pub queries: Option<Vec<String>>, + #[serde(rename = "group.limit", skip_serializing_if = "Option::is_none")] + pub limit: Option<usize>, + #[serde(rename = "group.offset", skip_serializing_if = "Option::is_none")] + pub offset: Option<usize>, + #[serde(rename = "group.sort", skip_serializing_if = "Option::is_none")] + pub sort: Option<Vec<String>>, + #[serde(rename = "group.format", skip_serializing_if = "Option::is_none")] + pub format: Option<GroupFormatting>, + #[serde(rename = "group.main", skip_serializing_if = "Option::is_none")] + pub main: Option<bool>, + #[serde(rename = "group.ngroups", skip_serializing_if = "Option::is_none")] + pub n_groups: Option<bool>, + #[serde(rename = "group.truncate", skip_serializing_if = "Option::is_none")] + pub truncate: Option<bool>, + #[serde(rename = "group.facet", skip_serializing_if = "Option::is_none")] + pub facet: Option<bool>, +} + +impl GroupingComponentBuilder { + /// Create a new GroupingComponentBuilder.
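+ /// Only `group=true` is set by default; every other parameter is optional and filled in through the builder methods below.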
+ /// # Examples + /// ```no_run + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// use solrstice::models::auth::SolrBasicAuth; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// # let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let response = SelectQueryBuilder::new() + /// .fq(&["age:[* TO *]"]) + /// .grouping(&GroupingComponentBuilder::new().fields(&["age"]).limit(10)) + /// .execute(&context, "collection_name") + /// .await?; + /// let groups = response.get_groups().ok_or("No groups")?; + /// let age_group = groups.get("age").ok_or("No age group")?; + /// + /// for group in age_group.get_field_result().ok_or("No field result")? { + /// println!("Group key: {}", group.get_group_value::()?); + /// let docs = group.get_doc_list().get_docs::()?; + /// } + /// # Ok(()) + /// # } + /// ``` + pub fn new() -> Self { + Self { + group: Some(true), + field: None, + queries: None, + limit: None, + offset: None, + sort: None, + format: None, + main: None, + n_groups: None, + truncate: None, + facet: None, + } + } + + /// Fields to group by. + /// # Examples + /// ```rust + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// GroupingComponentBuilder::new().fields(&["age"]); + /// ``` + pub fn fields(mut self, fields: &[&str]) -> Self { + self.field = Some(fields.into_iter().map(|x| x.to_string()).collect()); + self + } + + /// Queries to group by. + /// # Examples + /// ```rust + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// GroupingComponentBuilder::new().queries(&["age:[0 TO 59]", "age:[60 TO *]"]); + /// ``` + pub fn queries(mut self, queries: &[&str]) -> Self { + self.queries = Some(queries.into_iter().map(|x| x.to_string()).collect()); + self + } + + /// Maximum number of documents per group. + /// # Examples + /// ```rust + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// GroupingComponentBuilder::new().limit(10); + /// ``` + pub fn limit(mut self, limit: usize) -> Self { + self.limit = Some(limit); + self + } + + /// Initial offset + /// # Examples + /// ```rust + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// GroupingComponentBuilder::new().limit(10).offset(10); + /// ``` + pub fn offset(mut self, offset: usize) -> Self { + self.offset = Some(offset); + self + } + + /// How to sort the documents in the groups. + /// # Examples + /// ```rust + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// GroupingComponentBuilder::new().sort(&["age asc"]); + /// ``` + pub fn sort(mut self, sort: &[&str]) -> Self { + self.sort = Some(sort.into_iter().map(|x| x.to_string()).collect()); + self + } + + /// How to format the groups. + /// # Examples + /// ```rust + /// use solrstice::queries::components::grouping::{GroupingComponentBuilder, GroupFormatting}; + /// GroupingComponentBuilder::new().format(GroupFormatting::Simple); + /// ``` + pub fn format(mut self, format: GroupFormatting) -> Self { + self.format = Some(format); + self + } + + /// Put the results in the main result set. 
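+ /// When set to true, the grouped hits are returned as a flat document list in the main response (as with [GroupFormatting::Simple]) instead of under a separate grouped section.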
+ /// # Examples + /// ```rust + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// GroupingComponentBuilder::new().main(true); + /// ``` + pub fn main(mut self, main: bool) -> Self { + self.main = Some(main); + self + } + + /// Include the number of groups that have matched the query. + /// # Examples + /// ```rust + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// GroupingComponentBuilder::new().n_groups(true); + /// ``` + pub fn n_groups(mut self, n_groups: bool) -> Self { + self.n_groups = Some(n_groups); + self + } + + /// Not really sure what this does. + pub fn truncate(mut self, truncate: bool) -> Self { + self.truncate = Some(truncate); + self + } + + /// Not really sure what this does. + pub fn facet(mut self, facet: bool) -> Self { + self.facet = Some(facet); + self + } +} diff --git a/framework/src/queries/components/mod.rs b/framework/src/queries/components/mod.rs new file mode 100644 index 0000000..79eeb61 --- /dev/null +++ b/framework/src/queries/components/mod.rs @@ -0,0 +1,4 @@ +//! Components for queries. + +/// Grouping component +pub mod grouping; diff --git a/framework/src/queries/config.rs b/framework/src/queries/config.rs new file mode 100644 index 0000000..33c8264 --- /dev/null +++ b/framework/src/queries/config.rs @@ -0,0 +1,130 @@ +use crate::models::context::SolrServerContext; +use crate::models::error::{try_solr_error, SolrError}; +use crate::models::response::SolrResponse; +use crate::queries::helpers::basic_solr_request; +use std::fs::File; +use std::io::{Read, Seek, Write}; +use std::path::Path; +use tempfile::tempfile; +use walkdir::{DirEntry, WalkDir}; +use zip::write::FileOptions; + +// https://github.com/zip-rs/zip/blob/e32db515a2a4c7d04b0bf5851912a399a4cbff68/examples/write_dir.rs +fn zip_dir( + it: &mut dyn Iterator, + prefix: &Path, + writer: T, + method: zip::CompressionMethod, +) -> Result<(), SolrError> +where + T: Write + Seek, +{ + let mut zip = zip::ZipWriter::new(writer); + let options = FileOptions::default().compression_method(method); + + let mut buffer = Vec::new(); + for entry in it { + let path = entry.path(); + let name = path.strip_prefix(prefix)?; + if path.is_file() { + zip.start_file(name.to_str().unwrap(), options)?; + let mut f = File::open(path)?; + + f.read_to_end(&mut buffer)?; + zip.write_all(&buffer)?; + buffer.clear(); + } else if !name.as_os_str().is_empty() { + zip.add_directory(name.to_str().unwrap(), options)?; + } + } + zip.finish()?; + Ok(()) +} +pub async fn upload_config( + builder: &SolrServerContext, + name: &str, + path: &Path, +) -> Result<(), SolrError> { + let query_params = [("action", "UPLOAD"), ("name", name)]; + let mut request = builder + .client + .post(format!( + "{}/solr/admin/configs", + builder.host.get_solr_node().await? 
+ )) + .header("Content-Type", "application/octet-stream") + .query(&query_params); + if let Some(auth) = &builder.auth { + request = auth.add_auth_to_request(request) + } + let mut outfile = tempfile()?; + path.try_exists()?; + if path.is_dir() { + let walkdir = WalkDir::new(path); + let it = walkdir.into_iter(); + zip_dir( + &mut it.filter_map(|e| e.ok()), + path, + &outfile, + zip::CompressionMethod::Stored, + )?; + outfile.rewind()?; + } else { + outfile = File::open(path)?; + } + let mut vec = Vec::new(); + outfile.read_to_end(&mut vec)?; + request = request.body(vec); + let json = request.send().await?.json::().await?; + try_solr_error(&json)?; + Ok(()) +} + +pub async fn get_configs(builder: &SolrServerContext) -> Result, SolrError> { + let query_params = [("action", "LIST"), ("wt", "json")]; + let json = basic_solr_request(builder, "/solr/admin/configs", query_params.as_ref()).await?; + match json.config_sets { + None => Err(SolrError::Unknown("Could not get configsets".to_string())), + Some(config_sets) => Ok(config_sets), + } +} + +pub async fn config_exists(builder: &SolrServerContext, name: &str) -> Result { + let configs = get_configs(builder).await?; + Ok(configs.contains(&name.to_string())) +} + +pub async fn delete_config(builder: &SolrServerContext, name: &str) -> Result<(), SolrError> { + let query_params = [("action", "DELETE"), ("name", name)]; + basic_solr_request(builder, "/solr/admin/configs", query_params.as_ref()).await?; + Ok(()) +} + +#[cfg(feature = "blocking")] +use crate::runtime::RUNTIME; + +#[cfg(feature = "blocking")] +pub fn upload_config_blocking( + builder: &SolrServerContext, + name: &str, + path: &Path, +) -> Result<(), SolrError> { + RUNTIME + .handle() + .block_on(upload_config(builder, name, path)) +} + +#[cfg(feature = "blocking")] +pub fn get_configs_blocking(builder: &SolrServerContext) -> Result, SolrError> { + RUNTIME.handle().block_on(get_configs(builder)) +} + +#[cfg(feature = "blocking")] +pub fn config_exists_blocking(builder: &SolrServerContext, name: &str) -> Result { + RUNTIME.handle().block_on(config_exists(builder, name)) +} + +#[cfg(feature = "blocking")] +pub fn delete_config_blocking(builder: &SolrServerContext, name: &str) -> Result<(), SolrError> { + RUNTIME.handle().block_on(delete_config(builder, name)) +} diff --git a/framework/src/queries/helpers.rs b/framework/src/queries/helpers.rs new file mode 100644 index 0000000..16bdc00 --- /dev/null +++ b/framework/src/queries/helpers.rs @@ -0,0 +1,20 @@ +use crate::models::context::SolrServerContext; +use crate::models::error::{try_solr_error, SolrError}; +use crate::models::response::SolrResponse; + +pub async fn basic_solr_request( + builder: &SolrServerContext, + url: &str, + query_params: &[(&str, &str)], +) -> Result { + let mut request = builder + .client + .get(format!("{}{}", &builder.host.get_solr_node().await?, url)) + .query(query_params); + if let Some(auth) = &builder.auth { + request = auth.add_auth_to_request(request); + } + let solr_response = request.send().await?.json::().await?; + try_solr_error(&solr_response)?; + Ok(solr_response) +} diff --git a/framework/src/queries/index.rs b/framework/src/queries/index.rs new file mode 100644 index 0000000..96cd225 --- /dev/null +++ b/framework/src/queries/index.rs @@ -0,0 +1,306 @@ +use crate::models::commit_type::CommitType; +use crate::models::context::SolrServerContext; +use crate::models::error::{try_solr_error, SolrError}; +use crate::models::response::SolrResponse; +use serde::{Deserialize, Serialize}; + +/// A builder 
for the update handler. +/// # Examples +/// ```no_run +/// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +/// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; +/// # use solrstice::models::context::SolrServerContextBuilder; +/// # use solrstice::queries::index::UpdateQueryBuilder; +/// # use serde::Serialize; +/// # use solrstice::models::commit_type::CommitType; +/// # async fn run() -> Result<(), Box> { +/// #[derive(Serialize)] +/// struct Data {id: String} +/// +/// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); +/// let client = AsyncSolrCloudClient::new(context); +/// let response = client.index(&UpdateQueryBuilder::new().commit_type(CommitType::Soft), "collection_name", &[Data {id: "test".to_string()}]).await?; +/// # Ok(()) +/// # } +/// ``` +#[derive(Clone, Default, Serialize, Deserialize, PartialEq, Debug)] +pub struct UpdateQueryBuilder { + pub handler: String, + pub commit_type: CommitType, +} + +impl UpdateQueryBuilder { + /// Create a new instance of UpdateQueryBuilder. + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::index::UpdateQueryBuilder; + /// # use serde::Serialize; + /// # use solrstice::models::commit_type::CommitType; + /// # async fn run() -> Result<(), Box> { + /// #[derive(Serialize)] + /// struct Data {id: String} + /// + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let response = client.index(&UpdateQueryBuilder::new().commit_type(CommitType::Soft), "collection_name", &[Data {id: "test".to_string()}]).await?; + /// # Ok(()) + /// # } + /// ``` + pub fn new() -> Self { + UpdateQueryBuilder { + handler: "update".to_string(), + commit_type: CommitType::Hard, + } + } + + /// Set the handler for the query. Default is "update". + /// # Examples + /// ```no_run + /// use solrstice::queries::index::UpdateQueryBuilder; + /// let builder = UpdateQueryBuilder::new().handler("custom_handler"); + /// ``` + pub fn handler(mut self, handler: &str) -> Self { + self.handler = handler.to_string(); + self + } + + /// Set the commit type for the query. Default is CommitType::Hard. + /// # Examples + /// ```no_run + /// use solrstice::models::commit_type::CommitType; + /// use solrstice::queries::index::UpdateQueryBuilder; + /// let builder = UpdateQueryBuilder::new().commit_type(CommitType::Soft); + /// ``` + pub fn commit_type(mut self, commit_type: CommitType) -> Self { + self.commit_type = commit_type; + self + } + + /// Execute the query. + /// + /// This is not meant to be used directly. Use [AsyncSolrCloudClient::index](crate::clients::async_cloud_client::AsyncSolrCloudClient::index) instead. 
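+ /// If you do need to call it directly, a minimal sketch (assuming a reachable Solr node and an existing collection named "collection_name") could look like:
+ /// ```no_run
+ /// # use serde::Serialize;
+ /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost;
+ /// # use solrstice::models::context::SolrServerContextBuilder;
+ /// # use solrstice::queries::index::UpdateQueryBuilder;
+ /// # async fn run() -> Result<(), Box<dyn std::error::Error>> {
+ /// #[derive(Serialize)]
+ /// struct Data { id: String }
+ ///
+ /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build();
+ /// let docs = vec![Data { id: "example_document".to_string() }];
+ /// // Index the documents by executing the builder against the context directly.
+ /// UpdateQueryBuilder::new().execute(&context, "collection_name", docs.as_slice()).await?;
+ /// # Ok(())
+ /// # }
+ /// ```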
+ pub async fn execute<T: Serialize>( + &self, + builder: &SolrServerContext, + collection: &str, + data: &[T], + ) -> Result<SolrResponse, SolrError> { + let solr_url = format!( + "{}/solr/{}/{}", + &builder.host.get_solr_node().await?, + &collection, + &self.handler + ); + + let mut request = builder + .client + .post(solr_url) + .query(&[("overwrite", "true"), ("wt", "json")]) + .json(data); + if let Some(auth) = &builder.auth { + request = auth.add_auth_to_request(request) + } + + match self.commit_type { + CommitType::Hard => request = request.query(&[("commit", "true")]), + CommitType::Soft => request = request.query(&[("softCommit", "true")]), + } + let json = request.send().await?.json::<SolrResponse>().await?; + try_solr_error(&json)?; + Ok(json) + } +} + +/// A builder for deleting documents. +/// +/// Since deletes cannot be expressed properly in JSON, the request body is sent as XML. +/// # Examples +/// ```no_run +/// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +/// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; +/// # use solrstice::models::context::SolrServerContextBuilder; +/// # use solrstice::queries::index::DeleteQueryBuilder; +/// # use serde::Serialize; +/// # use solrstice::models::commit_type::CommitType; +/// # async fn run() -> Result<(), Box<dyn std::error::Error>> { +/// #[derive(Serialize)] +/// struct Data {id: String} +/// +/// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); +/// let client = AsyncSolrCloudClient::new(context); +/// let response = client.delete(&DeleteQueryBuilder::new().ids(&["document1", "document2"]), "collection_name").await?; +/// # Ok(()) +/// # } +/// ``` +#[derive(Clone, Default, Serialize, Deserialize, PartialEq, Debug)] +pub struct DeleteQueryBuilder { + /// The handler for the query. Default is "update". + pub handler: String, + /// The commit type for the query. Default is CommitType::Hard. + pub commit_type: CommitType, + /// Ids to delete + pub ids: Option<Vec<String>>, + /// Queries to delete + pub queries: Option<Vec<String>>, +} + +impl DeleteQueryBuilder { + /// Create a new instance of DeleteQueryBuilder. + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::queries::index::DeleteQueryBuilder; + /// # use serde::Serialize; + /// # use solrstice::models::commit_type::CommitType; + /// # async fn run() -> Result<(), Box<dyn std::error::Error>> { + /// #[derive(Serialize)] + /// struct Data {id: String} + /// + /// let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build(); + /// let client = AsyncSolrCloudClient::new(context); + /// let response = client.delete(&DeleteQueryBuilder::new().ids(&["document1", "document2"]), "collection_name").await?; + /// # Ok(()) + /// # } + /// ``` + pub fn new() -> Self { + DeleteQueryBuilder { + handler: "update".to_string(), + commit_type: CommitType::Hard, + ids: None, + queries: None, + } + } + + /// Set the handler for the query. Default is "update". + /// # Examples + /// ```no_run + /// use solrstice::queries::index::DeleteQueryBuilder; + /// let builder = DeleteQueryBuilder::new().handler("custom_handler"); + /// ``` + pub fn handler(mut self, handler: &str) -> Self { + self.handler = handler.to_string(); + self + } + + /// Set the commit_type for the query. Default is CommitType::Hard.
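+ /// A hard commit flushes documents to stable storage before returning, while a soft commit only makes them visible to searchers without the same durability guarantee.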
+ /// # Examples + /// ```no_run + /// use solrstice::models::commit_type::CommitType; + /// use solrstice::queries::index::DeleteQueryBuilder; + /// let builder = DeleteQueryBuilder::new().commit_type(CommitType::Soft); + /// ``` + pub fn commit_type(mut self, commit_type: CommitType) -> Self { + self.commit_type = commit_type; + self + } + + /// Set the ids to delete + /// # Examples + /// ```no_run + /// use solrstice::queries::index::DeleteQueryBuilder; + /// let builder = DeleteQueryBuilder::new().ids(&["document1", "document2"]); + /// ``` + pub fn ids(mut self, ids: &[&str]) -> Self { + self.ids = Some(ids.iter().map(|s| s.to_string()).collect()); + self + } + + /// Set the queries to delete + /// # Examples + /// ```no_run + /// use solrstice::queries::index::DeleteQueryBuilder; + /// let builder = DeleteQueryBuilder::new().queries(&["age:[* TO *]"]); + /// ``` + pub fn queries(mut self, queries: &[&str]) -> Self { + self.queries = Some(queries.iter().map(|s| s.to_string()).collect()); + self + } + + /// Execute the query. + /// + /// This is not meant to be used directly. Use [AsyncSolrCloudClient::delete](crate::clients::async_cloud_client::AsyncSolrCloudClient::delete) instead. + pub async fn execute( + &self, + context: &SolrServerContext, + collection: &str, + ) -> Result<SolrResponse, SolrError> { + let solr_url = format!( + "{}/solr/{}/{}", + &context.host.get_solr_node().await?, + &collection, + &self.handler + ); + let ids = self.ids.as_ref().map(|ids| { + ids.iter() + .map(|id| format!("<id>{}</id>", id)) + .collect::<Vec<String>>() + .join("") + }); + let queries = self.queries.as_ref().map(|queries| { + queries + .iter() + .map(|query| format!("<query>{}</query>", query)) + .collect::<Vec<String>>() + .join("") + }); + + let mut request = context + .client + .post(solr_url) + .query(&[("overwrite", "true"), ("wt", "json")]) + .header("Content-Type", "application/xml") + .body(format!( + "<delete>{}{}</delete>", + ids.unwrap_or_default(), + queries.unwrap_or_default() + )); + if let Some(auth) = &context.auth { + request = auth.add_auth_to_request(request) + } + + match self.commit_type { + CommitType::Hard => request = request.query(&[("commit", "true")]), + CommitType::Soft => request = request.query(&[("softCommit", "true")]), + } + let json = request.send().await?.json::<SolrResponse>().await?; + try_solr_error(&json)?; + Ok(json) + } +} + +#[cfg(feature = "blocking")] +use crate::runtime::RUNTIME; +#[cfg(feature = "blocking")] +impl UpdateQueryBuilder { + /// Execute the query. + /// + /// This is not meant to be used directly. Use [BlockingSolrCloudClient::index](crate::clients::blocking_cloud_client::BlockingSolrCloudClient::index) instead. + pub fn execute_blocking<T: Serialize>( + &self, + context: &SolrServerContext, + collection: &str, + data: &[T], + ) -> Result<SolrResponse, SolrError> { + RUNTIME + .handle() + .block_on(self.execute(context, collection, data)) + } +} +#[cfg(feature = "blocking")] +impl DeleteQueryBuilder { + /// Execute the query. + /// + /// This is not meant to be used directly. Use [BlockingSolrCloudClient::delete](crate::clients::blocking_cloud_client::BlockingSolrCloudClient::delete) instead. + pub fn execute_blocking( + &self, + context: &SolrServerContext, + collection: &str, + ) -> Result<SolrResponse, SolrError> { + RUNTIME.handle().block_on(self.execute(context, collection)) + } +} diff --git a/framework/src/queries/mod.rs b/framework/src/queries/mod.rs new file mode 100644 index 0000000..5e2fa0a --- /dev/null +++ b/framework/src/queries/mod.rs @@ -0,0 +1,16 @@ +//! The different query types this library supports.
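+//!
+//! The builders in these modules can be executed directly, but they are usually handed to one of the clients. A rough end-to-end sketch (assuming a running Solr node and an existing collection named "collection_name") might be:
+//! ```no_run
+//! # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient;
+//! # use solrstice::hosts::solr_server_host::SolrSingleServerHost;
+//! # use solrstice::models::context::SolrServerContextBuilder;
+//! # use solrstice::queries::select::SelectQueryBuilder;
+//! # async fn run() -> Result<(), Box<dyn std::error::Error>> {
+//! let context = SolrServerContextBuilder::new(SolrSingleServerHost::new("http://localhost:8983")).build();
+//! let client = AsyncSolrCloudClient::new(context);
+//! // Run a match-all select through the client.
+//! let response = client.select(&SelectQueryBuilder::new().q("*:*"), "collection_name").await?;
+//! # Ok(())
+//! # }
+//! ```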
+ +/// Alias API +pub mod alias; +/// Collection API +pub mod collection; +/// Components for select queries +pub mod components; +/// Config API +pub mod config; +/// Helper functions +pub mod helpers; +/// Index and Delete API +pub mod index; +/// Select query API +pub mod select; diff --git a/framework/src/queries/select.rs b/framework/src/queries/select.rs new file mode 100644 index 0000000..fad6a8e --- /dev/null +++ b/framework/src/queries/select.rs @@ -0,0 +1,282 @@ +use crate::models::context::SolrServerContext; +use crate::models::error::{try_solr_error, SolrError}; +use crate::models::response::SolrResponse; +use crate::queries::components::grouping::GroupingComponentBuilder; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Serialize, Deserialize, PartialEq, Debug)] +struct PostQueryWrapper { + pub params: SelectQueryBuilder, +} + +/// Builder for a select query. +/// +/// Also take a look at [AsyncSolrCloudClient::select](crate::clients::async_cloud_client::AsyncSolrCloudClient::select) +/// ```rust +/// use solrstice::queries::select::SelectQueryBuilder; +/// SelectQueryBuilder::new().fq(&["field1:val1", "field2:val2"]).q("*:*").rows(10).start(0); +/// ``` +#[derive(Serialize, Deserialize, Clone, Default, PartialEq, Debug)] +pub struct SelectQueryBuilder { + pub q: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub fq: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub fl: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub sort: Option>, + pub handle: String, + pub rows: usize, + pub start: usize, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "cursorMark")] + pub cursor_mark: Option, + #[serde(flatten)] + pub grouping: Option, +} + +impl SelectQueryBuilder { + /// Builder for a select query. + /// + /// Also take a look at [AsyncSolrCloudClient::select](crate::clients::async_cloud_client::AsyncSolrCloudClient::select) + /// ```rust + /// use solrstice::queries::select::SelectQueryBuilder; + /// SelectQueryBuilder::new().fq(&["field1:val1", "field2:val2"]).q("*:*").rows(10).start(0); + /// ``` + pub fn new() -> Self { + SelectQueryBuilder { + q: "*:*".to_string(), + fq: None, + fl: None, + sort: None, + handle: "select".to_string(), + rows: 10, + start: 0, + cursor_mark: None, + grouping: None, + } + } + + /// Set the q parameter. 
Default is "*:*" + pub fn q(mut self, q: &str) -> Self { + self.q = q.to_string(); + self + } + + /// A list of filter queries + /// ```rust + /// use solrstice::queries::select::SelectQueryBuilder; + /// SelectQueryBuilder::new().fq(&["id:1"]); + /// ``` + pub fn fq(mut self, queries: &[&str]) -> Self { + self.fq = Some(queries.into_iter().map(|x| x.to_string()).collect()); + self + } + + /// Set the fields to return + /// ```rust + /// use solrstice::queries::select::SelectQueryBuilder; + /// SelectQueryBuilder::new().fl(&["field1", "field2"]); + /// ``` + pub fn fl(mut self, fields: &[&str]) -> Self { + self.fl = Some(fields.into_iter().map(|x| x.to_string()).collect()); + self + } + + ///Set the sort order + ///```rust + /// use solrstice::queries::select::SelectQueryBuilder; + /// SelectQueryBuilder::new().sort(&["id asc", "field1 desc"]); + /// ``` + pub fn sort(mut self, sort: &[&str]) -> Self { + self.sort = Some(sort.into_iter().map(|x| x.to_string()).collect()); + self + } + + /// How many rows to return + /// ```rust + /// use solrstice::queries::select::SelectQueryBuilder; + /// SelectQueryBuilder::new().rows(1000); + /// ``` + pub fn rows(mut self, rows: usize) -> Self { + self.rows = rows; + self + } + + /// The offset to start from + /// ```rust + /// use solrstice::queries::select::SelectQueryBuilder; + /// SelectQueryBuilder::new().start(10); + /// ``` + pub fn start(mut self, start: usize) -> Self { + self.start = start; + self + } + + /// Use a cursor mark to iterate over the results + /// Default starts with "*", and which causes [SolrResponse::next_cursor_mark](crate::models::response::SolrResponse::next_cursor_mark) to be set. And can be provided for the next select. + /// ```no_run + /// use solrstice::queries::select::SelectQueryBuilder; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// # use solrstice::clients::async_cloud_client; + /// use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # async fn run() -> Result<(), Box> { + /// # let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(SolrSingleServerHost::new("localhost:8983")).build()); + /// let mut builder = SelectQueryBuilder::new().cursor_mark("*"); + /// let response = client.select(&builder, "collection").await?; + /// let mut cursor_mark = response.next_cursor_mark.ok_or("No cursor mark")?; + /// loop { + /// if cursor_mark == "*" { + /// break; + /// } + /// else { + /// builder = builder.cursor_mark(&cursor_mark); + /// let response = client.select(&builder, "collection").await?; + /// cursor_mark = response.next_cursor_mark.ok_or("No cursor mark")?; + /// } + /// } + /// # Ok(()) + /// # } + /// ``` + pub fn cursor_mark(mut self, cursor_mark: &str) -> Self { + self.cursor_mark = Some(cursor_mark.to_string()); + self + } + + /// Do a grouping query. 
Also take a look at [SolrGroupResult](crate::models::group::SolrGroupResult) and [SolrGroupFieldResult](crate::models::group::SolrGroupFieldResult) + /// # Examples + /// ```no_run + /// # use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; + /// # use solrstice::hosts::solr_server_host::SolrSingleServerHost; + /// # use solrstice::models::context::SolrServerContextBuilder; + /// use solrstice::queries::components::grouping::GroupingComponentBuilder; + /// use solrstice::queries::select::SelectQueryBuilder; + /// # async fn run() -> Result<(), Box> { + /// # let client = AsyncSolrCloudClient::new(SolrServerContextBuilder::new(SolrSingleServerHost::new("localhost:8983")).build()); + /// let builder = SelectQueryBuilder::new() + /// .grouping( + /// &GroupingComponentBuilder::new() + /// .queries(&["age:[0 TO 59]", "age:[60 TO *]"]) + /// .limit(10), + /// ); + /// let response = client.select(&builder, "collection").await?; + /// let groups = response.get_groups().ok_or("No groups")?; + /// let queries = groups.get("age:[0 TO 59]").ok_or("Missing group")?.get_query_result().ok_or("Missing query result")?; + /// # Ok(()) + /// # } + /// ``` + pub fn grouping(mut self, grouping: &GroupingComponentBuilder) -> Self { + self.grouping = Some(grouping.clone()); + self + } + + pub async fn execute( + &self, + builder: &SolrServerContext, + collection: &str, + ) -> Result { + let solr_url = format!( + "{}/solr/{}/{}", + builder.host.get_solr_node().await?, + collection, + &self.handle + ); + let wrapper = PostQueryWrapper { + params: self.clone(), + }; + let mut request = builder + .client + .post(&solr_url) + .json::(&wrapper); + if let Some(auth) = &builder.auth { + request = auth.add_auth_to_request(request); + } + let data = request.send().await?.json::().await?; + try_solr_error(&data)?; + Ok(data) + } +} + +#[cfg(feature = "blocking")] +use crate::runtime::RUNTIME; +#[cfg(feature = "blocking")] +impl SelectQueryBuilder { + pub fn execute_blocking( + &self, + builder: &SolrServerContext, + collection: &str, + ) -> Result { + RUNTIME.handle().block_on(self.execute(builder, collection)) + } +} + +#[cfg(test)] +pub mod tests { + use crate::queries::components::grouping::GroupingComponentBuilder; + use crate::queries::select::SelectQueryBuilder; + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + + fn bool_to_string(val: &bool, serializer: S) -> Result + where + S: Serializer, + { + match val { + true => serializer.serialize_str("true"), + false => serializer.serialize_str("false"), + } + } + + fn string_to_bool<'de, D>(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + match s.as_str() { + "true" => Ok(true), + "false" => Ok(false), + _ => Err(serde::de::Error::custom("Could not convert string to bool")), + } + } + + fn is_false(b: &bool) -> bool { + !(*b) + } + + #[derive(Serialize, Deserialize, Clone, Default, PartialEq, Debug)] + pub struct TestOuterStruct { + #[serde(flatten)] + pub grouping: Option, + } + + #[derive(Deserialize, Serialize, Clone, Debug, Default, PartialEq)] + pub struct TestInnerStruct { + #[serde(rename = "group.field", skip_serializing_if = "Option::is_none")] + pub field: Option>, + #[serde(rename = "group.query", skip_serializing_if = "Option::is_none")] + pub queries: Option>, + #[serde(rename = "group.limit", skip_serializing_if = "Option::is_none")] + pub limit: Option, + #[serde( + rename = "group.main", + skip_serializing_if = "is_false", + serialize_with = "bool_to_string", + 
deserialize_with = "string_to_bool" + )] + pub main: bool, + } + + #[test] + pub fn serialize_select_query_builder_works() { + let builder = SelectQueryBuilder::new().fq(&["id:1", "id:2"]).grouping( + &GroupingComponentBuilder::new() + .queries(&["id:1", "id:2"]) + .fields(&["id", "name"]) + .limit(10), + ); + let serialized = serde_json::to_string(&builder).unwrap(); + println!("{}", serialized); + let deserialized = serde_json::from_str::(&serialized).unwrap(); + assert_eq!(builder, deserialized); + } +} diff --git a/framework/src/runtime.rs b/framework/src/runtime.rs new file mode 100644 index 0000000..73b0a3f --- /dev/null +++ b/framework/src/runtime.rs @@ -0,0 +1,5 @@ +lazy_static::lazy_static! { + pub static ref RUNTIME: tokio::runtime::Runtime = { + tokio::runtime::Runtime::new().expect("Failed to create runtime for blocking calls") + }; +} diff --git a/framework/tests/functionality/alias_tests.rs b/framework/tests/functionality/alias_tests.rs new file mode 100644 index 0000000..d115b71 --- /dev/null +++ b/framework/tests/functionality/alias_tests.rs @@ -0,0 +1,45 @@ +use crate::structures::BaseTestsBuildup; +use solrstice::queries::alias::{alias_exists, create_alias, delete_alias}; +use solrstice::queries::collection::{create_collection, delete_collection}; +use solrstice::queries::config::{delete_config, upload_config}; +use std::path::Path; + +#[tokio::test] +async fn create_alias_creates_alias() { + let alias_name = "CreateAliasAlias".to_string(); + let collection_name = "CreateAliasCollection".to_string(); + let config_name = "CreateAliasCollection".to_string(); + + let config = BaseTestsBuildup::new().await; + + let _ = delete_alias(&config.context, &alias_name).await; + let _ = delete_collection(&config.context, &collection_name).await; + let _ = delete_config(&config.context, &config_name).await; + + upload_config( + &config.context, + &config_name, + Path::new(&config.config_path), + ) + .await + .unwrap(); + create_collection(&config.context, &collection_name, &config_name, 1, 1) + .await + .unwrap(); + + assert_eq!( + alias_exists(&config.context, &alias_name).await.unwrap(), + false + ); + create_alias(&config.context, &alias_name, &[&collection_name]) + .await + .unwrap(); + assert_eq!( + alias_exists(&config.context, &alias_name).await.unwrap(), + true + ); + + let _ = delete_alias(&config.context, &alias_name).await.unwrap(); + let _ = delete_collection(&config.context, &collection_name).await; + let _ = delete_config(&config.context, &config_name).await; +} diff --git a/framework/tests/functionality/blocking_tests.rs b/framework/tests/functionality/blocking_tests.rs new file mode 100644 index 0000000..fca9808 --- /dev/null +++ b/framework/tests/functionality/blocking_tests.rs @@ -0,0 +1,28 @@ +use crate::structures::FunctionalityTestsBuildup; +use solrstice::queries::config::get_configs_blocking; +use std::thread; + +#[test] +fn blocking_works_when_simultaneous_connections_multiple_threads() { + let runtime = tokio::runtime::Runtime::new().unwrap(); + let config = runtime.block_on(async { + FunctionalityTestsBuildup::build_up("BlockingMultipleConsumer") + .await + .unwrap() + }); + + let threads = 100; + let mut handles = Vec::new(); + for _ in 0..threads { + let server_request = config.context.clone(); + let handle = thread::spawn(move || { + get_configs_blocking(&server_request).unwrap(); + }); + handles.push(handle); + } + + for handle in handles { + handle.join().unwrap(); + } + runtime.block_on(async { config.tear_down().await.unwrap() }); +} diff --git 
a/framework/tests/functionality/client_tests.rs b/framework/tests/functionality/client_tests.rs new file mode 100644 index 0000000..fa74bb8 --- /dev/null +++ b/framework/tests/functionality/client_tests.rs @@ -0,0 +1,63 @@ +use crate::structures::BaseTestsBuildup; +use serde::{Deserialize, Serialize}; +use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +use solrstice::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; +use solrstice::queries::select::SelectQueryBuilder; +use std::path::Path; + +#[derive(Serialize, Deserialize, Debug)] +struct TestData { + id: String, +} + +#[tokio::test] +pub async fn client_example_test() { + let name = "example_test"; + let config = BaseTestsBuildup::new().await; + let client = AsyncSolrCloudClient::new(config.context); + + let _ = client.delete_collection(name).await; + let _ = client.delete_config(name).await; + + // Upload config + client + .upload_config(name, Path::new(&config.config_path)) + .await + .unwrap(); + // Create collection + client.create_collection(name, name, 1, 1).await.unwrap(); + + // Index documents + let docs = vec![TestData { + id: "example_document".to_string(), + }]; + client + .index(&UpdateQueryBuilder::new(), name, docs.as_slice()) + .await + .unwrap(); + + // Search documents + let server_docs = client + .select( + &SelectQueryBuilder::new().fq(&["id:example_document"]), + name, + ) + .await + .unwrap() + .get_response() + .unwrap() + .get_docs::() + .unwrap(); + assert_eq!(server_docs.len(), 1); + + // Delete documents + client + .delete(&DeleteQueryBuilder::new().ids(&["example_document"]), name) + .await + .unwrap(); + + // Delete collection + client.delete_collection(name).await.unwrap(); + // Delete config + client.delete_config(name).await.unwrap(); +} diff --git a/framework/tests/functionality/collection_test.rs b/framework/tests/functionality/collection_test.rs new file mode 100644 index 0000000..181ccde --- /dev/null +++ b/framework/tests/functionality/collection_test.rs @@ -0,0 +1,42 @@ +use crate::structures::BaseTestsBuildup; +use solrstice::queries::collection::{collection_exists, create_collection, delete_collection}; +use solrstice::queries::config::{delete_config, upload_config}; +use std::path::Path; + +#[tokio::test] +async fn create_collection_creates_collection() { + let config_name = "CreateCollectionConfig".to_string(); + let collection_name = "CreateCollectionCollection".to_string(); + + let config = BaseTestsBuildup::new().await; + let _ = delete_collection(&config.context, &collection_name).await; + let _ = delete_config(&config.context, &config_name).await; + + assert_eq!( + collection_exists(&config.context, &collection_name) + .await + .unwrap(), + false + ); + upload_config( + &config.context, + &config_name, + Path::new(&config.config_path), + ) + .await + .unwrap(); + create_collection(&config.context, &collection_name, &config_name, 1, 1) + .await + .unwrap(); + assert_eq!( + collection_exists(&config.context, &collection_name) + .await + .unwrap(), + true + ); + + let _ = delete_collection(&config.context, &collection_name) + .await + .unwrap(); + let _ = delete_config(&config.context, &config_name); +} diff --git a/framework/tests/functionality/config_test.rs b/framework/tests/functionality/config_test.rs new file mode 100644 index 0000000..414c981 --- /dev/null +++ b/framework/tests/functionality/config_test.rs @@ -0,0 +1,74 @@ +use crate::structures::BaseTestsBuildup; +use solrstice::models::error::SolrError; +use solrstice::queries::config::{config_exists, 
delete_config, get_configs, upload_config}; +use std::path::Path; + +#[tokio::test] +async fn upload_config_uploads_config() -> Result<(), SolrError> { + let config = BaseTestsBuildup::new().await; + let _ = delete_config(&config.context, "UploadConfig").await; + assert!(!config_exists(&config.context, "UploadConfig") + .await + .unwrap()); + upload_config( + &config.context, + "UploadConfig", + Path::new(&config.config_path), + ) + .await + .unwrap(); + assert!(config_exists(&config.context, "UploadConfig") + .await + .unwrap()); + delete_config(&config.context, "UploadConfig") + .await + .unwrap(); + Ok(()) +} + +#[tokio::test] +async fn get_configs_gets_configs() -> Result<(), SolrError> { + let config = BaseTestsBuildup::new().await; + let configs = get_configs(&config.context).await.unwrap(); + assert!(configs.contains(&"_default".to_string())); + Ok(()) +} + +#[tokio::test] +async fn delete_config_deletes_config() -> Result<(), SolrError> { + let config = BaseTestsBuildup::new().await; + let _ = delete_config(&config.context, "DeleteConfig").await; + upload_config( + &config.context, + "DeleteConfig", + Path::new(&config.config_path), + ) + .await + .unwrap(); + assert!(config_exists(&config.context, "DeleteConfig") + .await + .unwrap()); + delete_config(&config.context, "DeleteConfig") + .await + .unwrap(); + assert!(!config_exists(&config.context, "DeleteConfig") + .await + .unwrap()); + Ok(()) +} + +#[tokio::test] +async fn config_exists_works_when_config_exists() -> Result<(), SolrError> { + let config = BaseTestsBuildup::new().await; + assert!(config_exists(&config.context, "_default").await.unwrap()); + Ok(()) +} + +#[tokio::test] +async fn config_exists_works_when_config_doesent_exist() -> Result<(), SolrError> { + let config = BaseTestsBuildup::new().await; + assert!(!config_exists(&config.context, "_this_does_not_exist") + .await + .unwrap()); + Ok(()) +} diff --git a/framework/tests/functionality/grouping_tests.rs b/framework/tests/functionality/grouping_tests.rs new file mode 100644 index 0000000..422e3c4 --- /dev/null +++ b/framework/tests/functionality/grouping_tests.rs @@ -0,0 +1,183 @@ +use crate::structures::{get_test_data, FunctionalityTestsBuildup}; +use solrstice::models::error::SolrError; +use solrstice::queries::components::grouping::{GroupFormatting, GroupingComponentBuilder}; +use solrstice::queries::index::UpdateQueryBuilder; +use solrstice::queries::select::SelectQueryBuilder; +use std::collections::HashMap; + +#[tokio::test] +async fn group_fields() -> Result<(), SolrError> { + let config = FunctionalityTestsBuildup::build_up("GroupBasic") + .await + .unwrap(); + let update = UpdateQueryBuilder::new(); + update + .execute(&config.context, &config.collection_name, &get_test_data()) + .await?; + + let response = SelectQueryBuilder::new() + .fq(&["age:[* TO *]"]) + .grouping(&GroupingComponentBuilder::new().fields(&["age"]).limit(10)) + .execute(&config.context, &config.collection_name) + .await?; + let groups = response + .get_groups() + .ok_or(SolrError::Unknown("No groups found".to_string()))?; + let age_group = groups.get("age").unwrap(); + let correct_data: HashMap = [(20, 2), (40, 2), (60, 2)].iter().cloned().collect(); + for group in age_group.get_field_result().unwrap() { + assert_eq!( + *correct_data + .get(&group.get_group_value::().unwrap()) + .unwrap(), + group.doc_list.num_found + ) + } + let _ = config.tear_down().await; + Ok(()) +} + +#[tokio::test] +async fn group_queries() -> Result<(), SolrError> { + let config = 
FunctionalityTestsBuildup::build_up("GroupQuery") + .await + .unwrap(); + let update = UpdateQueryBuilder::new(); + update + .execute(&config.context, &config.collection_name, &get_test_data()) + .await?; + + let response = SelectQueryBuilder::new() + .grouping( + &GroupingComponentBuilder::new() + .queries(&["age:[0 TO 59]", "age:[60 TO *]"]) + .limit(10), + ) + .execute(&config.context, &config.collection_name) + .await?; + let groups = response + .get_groups() + .ok_or(SolrError::Unknown("Could not get groups".to_string()))?; + let first = groups + .get("age:[0 TO 59]") + .unwrap() + .get_query_result() + .unwrap(); + let second = groups + .get("age:[60 TO *]") + .unwrap() + .get_query_result() + .unwrap(); + assert_eq!(first.num_found, 4); + assert_eq!(second.num_found, 2); + let _ = config.tear_down().await; + Ok(()) +} + +#[tokio::test] +async fn group_n_groups() -> Result<(), SolrError> { + let config = FunctionalityTestsBuildup::build_up("GroupNGroups") + .await + .unwrap(); + let update = UpdateQueryBuilder::new(); + update + .execute(&config.context, &config.collection_name, &get_test_data()) + .await?; + + let response = SelectQueryBuilder::new() + .fq(&["age:[* TO *]"]) + .grouping( + &GroupingComponentBuilder::new() + .fields(&["age"]) + .limit(10) + .n_groups(true), + ) + .execute(&config.context, &config.collection_name) + .await?; + let groups = response + .get_groups() + .ok_or(SolrError::Unknown("Could not get groups".to_string()))?; + let age_group = groups.get("age").unwrap(); + assert_eq!(age_group.n_groups, Some(3)); + let _ = config.tear_down().await; + Ok(()) +} + +#[tokio::test] +async fn group_main() -> Result<(), SolrError> { + let config = FunctionalityTestsBuildup::build_up("GroupMain") + .await + .unwrap(); + let update = UpdateQueryBuilder::new(); + update + .execute(&config.context, &config.collection_name, &get_test_data()) + .await?; + + let result = SelectQueryBuilder::new() + .grouping( + &GroupingComponentBuilder::new() + .queries(&["age:[0 TO 59]"]) + .limit(10) + .main(true), + ) + .execute(&config.context, &config.collection_name) + .await?; + let response = result.get_response().unwrap(); + let main_contents = response.get_docs::().unwrap(); + assert_eq!(response.num_found, 4); + assert_eq!(main_contents.len(), 4); + let _ = config.tear_down().await; + Ok(()) +} + +#[tokio::test] +async fn group_main_false() -> Result<(), SolrError> { + let config = FunctionalityTestsBuildup::build_up("GroupMainFalse") + .await + .unwrap(); + let update = UpdateQueryBuilder::new(); + update + .execute(&config.context, &config.collection_name, &get_test_data()) + .await?; + + let result = SelectQueryBuilder::new() + .grouping( + &GroupingComponentBuilder::new() + .queries(&["age:[0 TO 59]"]) + .limit(10) + .main(false), + ) + .execute(&config.context, &config.collection_name) + .await?; + let response = result.get_response(); + assert!(response.is_none()); + let _ = config.tear_down().await; + Ok(()) +} + +#[tokio::test] +async fn group_simple() -> Result<(), SolrError> { + let config = FunctionalityTestsBuildup::build_up("GroupSimple") + .await + .unwrap(); + let update = UpdateQueryBuilder::new(); + update + .execute(&config.context, &config.collection_name, &get_test_data()) + .await?; + + let result = SelectQueryBuilder::new() + .grouping( + &GroupingComponentBuilder::new() + .fields(&["age"]) + .limit(10) + .format(GroupFormatting::Simple), + ) + .execute(&config.context, &config.collection_name) + .await?; + let response = result.get_groups().ok_or("No groups 
found")?; + let group = response.get("age").ok_or("age group not found")?; + let group_contents = group.get_simple_result().ok_or("No group contents found")?; + assert_eq!(group_contents.num_found, 8); + let _ = config.tear_down().await; + Ok(()) +} diff --git a/framework/tests/functionality/index_test.rs b/framework/tests/functionality/index_test.rs new file mode 100644 index 0000000..889ab4e --- /dev/null +++ b/framework/tests/functionality/index_test.rs @@ -0,0 +1,119 @@ +use crate::structures::{get_test_data, BaseTestsBuildup, City, FunctionalityTestsBuildup}; +use solrstice::models::error::SolrError; +use solrstice::queries::collection::{create_collection, delete_collection}; +use solrstice::queries::config::{delete_config, upload_config}; +use solrstice::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; +use solrstice::queries::select::SelectQueryBuilder; +use std::path::Path; + +#[tokio::test] +async fn index_indexes_documents() -> Result<(), SolrError> { + let config = BaseTestsBuildup::new().await; + let config_name = "IndexConfig".to_string(); + let collection_name = "IndexCollection".to_string(); + + let _ = delete_collection(&config.context, &collection_name).await; + let _ = delete_config(&config.context, &config_name).await; + + upload_config( + &config.context, + &config_name, + Path::new(&config.config_path), + ) + .await + .unwrap(); + create_collection(&config.context, &collection_name, &config_name, 1, 1) + .await + .unwrap(); + + let update = UpdateQueryBuilder::new(); + update + .execute(&config.context, &collection_name, &get_test_data()) + .await?; + + delete_collection(&config.context, &collection_name) + .await + .unwrap(); + delete_config(&config.context, &config_name).await.unwrap(); + Ok(()) +} + +#[tokio::test] +async fn index_indexes_correct_documents() -> Result<(), SolrError> { + let config = BaseTestsBuildup::new().await; + let config_name = "IndexCorrectConfig".to_string(); + let collection_name = "IndexCorrectCollection".to_string(); + + let _ = delete_collection(&config.context, &collection_name).await; + let _ = delete_config(&config.context, &config_name).await; + + upload_config( + &config.context, + &config_name, + Path::new(&config.config_path), + ) + .await + .unwrap(); + create_collection(&config.context, &collection_name, &config_name, 1, 1) + .await + .unwrap(); + + let update = UpdateQueryBuilder::new(); + update + .execute(&config.context, &collection_name, &get_test_data()) + .await?; + + let returned_data = SelectQueryBuilder::new() + .fl(&["*", "[child]"]) + .fq(&["city_name:[* TO *]"]) + .execute(&config.context, &collection_name) + .await + .unwrap() + .get_response() + .unwrap() + .get_docs::() + .unwrap(); + assert_eq!(returned_data, get_test_data()); + + delete_collection(&config.context, &collection_name) + .await + .unwrap(); + delete_config(&config.context, &config_name).await.unwrap(); + Ok(()) +} + +#[tokio::test] +async fn delete_deletes_documents_by_id() { + let test_data_name = "DeleteDeletesById".to_string(); + let config = FunctionalityTestsBuildup::build_up(&test_data_name) + .await + .unwrap(); + let update = UpdateQueryBuilder::new(); + update + .execute(&config.context, &config.collection_name, &get_test_data()) + .await + .unwrap(); + let num_found = SelectQueryBuilder::new() + .execute(&config.context, &config.collection_name) + .await + .unwrap() + .get_response() + .unwrap() + .num_found; + assert_ne!(num_found, 0); + + DeleteQueryBuilder::new() + .queries(&["*:*"]) + .execute(&config.context, 
&config.collection_name) + .await + .unwrap(); + + let num_found = SelectQueryBuilder::new() + .execute(&config.context, &config.collection_name) + .await + .unwrap() + .get_response() + .unwrap() + .num_found; + assert_eq!(num_found, 0); +} diff --git a/framework/tests/functionality/mod.rs b/framework/tests/functionality/mod.rs new file mode 100644 index 0000000..26e62a4 --- /dev/null +++ b/framework/tests/functionality/mod.rs @@ -0,0 +1,12 @@ +pub mod alias_tests; +pub mod client_tests; +pub mod collection_test; +pub mod config_test; +pub mod grouping_tests; +pub mod index_test; +pub mod readme_test; +pub mod select_test; +pub mod zk_test; + +#[cfg(feature = "blocking")] +pub mod blocking_tests; diff --git a/framework/tests/functionality/readme_test.rs b/framework/tests/functionality/readme_test.rs new file mode 100644 index 0000000..f25ede5 --- /dev/null +++ b/framework/tests/functionality/readme_test.rs @@ -0,0 +1,75 @@ +use crate::structures::BaseTestsBuildup; +use serde::{Deserialize, Serialize}; +use solrstice::clients::async_cloud_client::AsyncSolrCloudClient; +// use solrstice::hosts::solr_server_host::SolrSingleServerHost; +// use solrstice::models::auth::SolrBasicAuth; +// use solrstice::models::context::SolrServerContext; +use solrstice::models::error::SolrError; +use solrstice::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; +use solrstice::queries::select::SelectQueryBuilder; +use std::path::Path; + +#[derive(Serialize, Deserialize, Debug)] +struct TestData { + id: String, +} + +#[tokio::test] +pub async fn example() -> Result<(), SolrError> { + let config = BaseTestsBuildup::new().await; + + //Create a solr client. You can also use a list of zookeeper hosts instead of a single server. + // let context = SolrServerContext::new(SolrSingleServerHost::new("http://localhost:8983")) + // .with_auth(SolrBasicAuth::new("solr", Some("SolrRocks"))); + let context = config.context; + let client = AsyncSolrCloudClient::new(context); + + // Upload config + client + .upload_config("example_config", Path::new(&config.config_path)) + .await?; + // client + // .upload_config("example_config", Path::new("/path/to/config")) + // .await?; + + // Create collection + client + .create_collection("example_collection", "example_config", 1, 1) + .await?; + + // Index document + let docs = vec![TestData { + id: "example_document".to_string(), + }]; + client + .index( + &UpdateQueryBuilder::new(), + "example_collection", + docs.as_slice(), + ) + .await?; + + // Search and retrieve the document + let docs = client + .select( + &SelectQueryBuilder::new().fq(&["id:example_document"]), + "example_collection", + ) + .await? + .get_response() + .ok_or("No response provided")? 
+ .get_docs::()?; + assert_eq!(docs.len(), 1); + + // Delete the document + client + .delete( + &DeleteQueryBuilder::new().ids(&["example_document"]), + "example_collection", + ) + .await?; + + client.delete_collection("example_collection").await?; + client.delete_config("example_config").await?; + Ok(()) +} diff --git a/framework/tests/functionality/select_test.rs b/framework/tests/functionality/select_test.rs new file mode 100644 index 0000000..f079314 --- /dev/null +++ b/framework/tests/functionality/select_test.rs @@ -0,0 +1,87 @@ +use crate::structures::{get_test_data, City, FunctionalityTestsBuildup}; +use solrstice::models::error::SolrError; +use solrstice::queries::index::UpdateQueryBuilder; +use solrstice::queries::select::SelectQueryBuilder; + +#[tokio::test] +async fn select_works_when_no_result() -> Result<(), SolrError> { + let config = FunctionalityTestsBuildup::build_up("SelectNoResult") + .await + .unwrap(); + + let result = SelectQueryBuilder::new() + .execute(&config.context, &config.collection_name) + .await + .unwrap(); + assert_eq!( + result + .get_response() + .unwrap() + .get_docs::() + .unwrap() + .len(), + 0 + ); + let _ = config.tear_down().await; + Ok(()) +} + +#[tokio::test] +async fn select_works_when_no_result_serde_value() -> Result<(), SolrError> { + let config = FunctionalityTestsBuildup::build_up("SelectNoResultSerdeValue") + .await + .unwrap(); + + let result = SelectQueryBuilder::new() + .execute(&config.context, &config.collection_name) + .await + .unwrap(); + assert_eq!( + result + .get_response() + .unwrap() + .get_docs::() + .unwrap() + .len(), + 0 + ); + let _ = config.tear_down().await; + Ok(()) +} + +#[tokio::test] +async fn select_works_using_cursor_mark() -> Result<(), SolrError> { + let config = FunctionalityTestsBuildup::build_up("SelectCursorMark") + .await + .unwrap(); + + UpdateQueryBuilder::new() + .execute(&config.context, &config.collection_name, &get_test_data()) + .await + .unwrap(); + + let mut cursor_mark = "*".to_string(); + let mut current_iteration = 0; + loop { + if current_iteration > 100 { + panic!("Cursor mark test failed. Too many iterations"); + } + let result = SelectQueryBuilder::new() + .fq(&["age:[* TO *]"]) + .rows(1) + .cursor_mark(cursor_mark.as_str()) + .sort(&["id desc"]) + .execute(&config.context, &config.collection_name) + .await + .unwrap(); + if let Some(next_cursor_mark) = result.next_cursor_mark { + if cursor_mark.as_str() == "*" { + return Ok(()); + } + cursor_mark = next_cursor_mark; + } else { + panic!("Cursor mark test failed. 
No next cursor mark") + } + current_iteration += 1; + } +} diff --git a/framework/tests/functionality/zk_test.rs b/framework/tests/functionality/zk_test.rs new file mode 100644 index 0000000..95fe42d --- /dev/null +++ b/framework/tests/functionality/zk_test.rs @@ -0,0 +1,40 @@ +use crate::structures::BaseTestsBuildup; +use solrstice::hosts::solr_host::SolrHost; +use solrstice::hosts::zookeeper_host::ZookeeperEnsembleHostConnector; +use std::time::Duration; +use std::vec; + +#[tokio::test] +async fn create_zookeeper_client() { + BaseTestsBuildup::new().await; + let zk_hosts = vec![std::env::var("ZK_HOST").unwrap()]; + ZookeeperEnsembleHostConnector::new( + zk_hosts + .iter() + .map(|x| x.as_str()) + .collect::>() + .as_slice(), + Duration::from_secs(15), + ) + .connect() + .await + .unwrap(); +} + +#[tokio::test] +async fn get_solr_node_from_zookeeper() { + BaseTestsBuildup::new().await; + let zk_hosts = vec![std::env::var("ZK_HOST").unwrap()]; + let host = ZookeeperEnsembleHostConnector::new( + zk_hosts + .iter() + .map(|x| x.as_str()) + .collect::>() + .as_slice(), + Duration::from_secs(15), + ) + .connect() + .await + .unwrap(); + let _ = host.get_solr_node().await.unwrap(); +} diff --git a/framework/tests/main.rs b/framework/tests/main.rs new file mode 100644 index 0000000..15ccec1 --- /dev/null +++ b/framework/tests/main.rs @@ -0,0 +1,2 @@ +pub mod functionality; +pub mod structures; diff --git a/framework/tests/structures.rs b/framework/tests/structures.rs new file mode 100644 index 0000000..8c3fac2 --- /dev/null +++ b/framework/tests/structures.rs @@ -0,0 +1,146 @@ +use serde::{Deserialize, Serialize}; +use solrstice::hosts::solr_server_host::SolrSingleServerHost; +use solrstice::models::auth::SolrBasicAuth; +use solrstice::models::context::{SolrServerContext, SolrServerContextBuilder}; +use solrstice::models::error::SolrError; +use solrstice::queries::collection::{create_collection, delete_collection}; +use solrstice::queries::config::{delete_config, upload_config}; +use solrstice::queries::helpers::basic_solr_request; +use std::path::Path; +use std::string::ToString; +use std::time::Duration; + +pub struct BaseTestsBuildup { + pub context: SolrServerContext, + pub config_path: String, +} + +impl BaseTestsBuildup { + pub async fn new() -> Self { + dotenv::from_filename("../test_setup/.env").ok(); + let username = std::env::var("SOLR_USERNAME").unwrap(); + let password = std::env::var("SOLR_PASSWORD").unwrap(); + let auth = match username.is_empty() { + true => None, + false => Some(SolrBasicAuth::new( + username.as_str(), + Some(password.as_str()), + )), + }; + let builder = SolrServerContextBuilder::new(SolrSingleServerHost::new( + std::env::var("SOLR_HOST").unwrap().as_str(), + )); + let context = if let Some(auth) = auth { + builder.with_auth(auth).build() + } else { + builder.build() + }; + wait_for_solr(&context, Duration::from_secs(30)).await; + BaseTestsBuildup { + context, + config_path: "../test_setup/test_collection".to_string(), + } + } +} + +pub struct FunctionalityTestsBuildup { + pub context: SolrServerContext, + pub config_path: String, + pub basename: String, + pub config_name: String, + pub collection_name: String, +} + +impl FunctionalityTestsBuildup { + pub async fn build_up(basename: &str) -> Result { + dotenv::from_filename("../test_setup/.env").ok(); + let host = std::env::var("SOLR_HOST").unwrap(); + let config_path = "../test_setup/test_collection".to_string(); + let username = std::env::var("SOLR_USERNAME").unwrap(); + let password = 
std::env::var("SOLR_PASSWORD").unwrap(); + let auth = match username.is_empty() { + true => { + return Err(SolrError::Unknown( + "Could not find solr username in tests .env file".to_string(), + )) + } + false => SolrBasicAuth::new(username.as_str(), Some(password.as_str())), + }; + + let config_name = basename.to_owned() + "Config"; + let collection_name = basename.to_owned() + "Collection"; + + let solr_request = SolrServerContextBuilder::new(SolrSingleServerHost::new(host.as_str())) + .with_auth(auth) + .build(); + wait_for_solr(&solr_request, Duration::from_secs(30)).await; + + let _ = delete_collection(&solr_request, &collection_name).await; + let _ = delete_config(&solr_request, &config_name).await; + + upload_config(&solr_request, &config_name, Path::new(&config_path)) + .await + .unwrap(); + create_collection(&solr_request, &collection_name, &config_name, 1, 1) + .await + .unwrap(); + + Ok(Self { + context: solr_request, + basename: basename.to_string(), + config_path, + collection_name, + config_name, + }) + } + + pub async fn tear_down(&self) -> Result<(), SolrError> { + delete_collection(&self.context, &self.collection_name) + .await + .unwrap(); + delete_config(&self.context, &self.config_name) + .await + .unwrap(); + Ok(()) + } +} + +#[derive(Deserialize, Serialize, Eq, PartialEq, Debug)] +pub struct City { + id: String, + city_name: String, + population: Vec, +} + +#[derive(Deserialize, Serialize, Eq, PartialEq, Debug)] +pub struct Population { + id: String, + age: usize, + count: usize, +} + +pub fn get_test_data() -> Vec { + // Load data from json file using serde, and deserialize it into a Vec + + let data: Vec = + serde_json::from_reader(std::fs::File::open("../test_setup/test_data.json").unwrap()) + .unwrap(); + data +} + +pub async fn wait_for_solr(context: &SolrServerContext, max_time: Duration) { + let end: std::time::Instant = std::time::Instant::now() + max_time; + while std::time::Instant::now() < end { + let response = basic_solr_request( + context, + "/solr/admin/collections", + &[("action", "CLUSTERSTATUS")], + ) + .await; + if response.is_ok() { + return; + } + tokio::time::sleep(Duration::from_secs(1)).await; + } + panic!("Solr did not respond within {:?} seconds", max_time); +} diff --git a/test_setup/.env b/test_setup/.env new file mode 100644 index 0000000..f743824 --- /dev/null +++ b/test_setup/.env @@ -0,0 +1,5 @@ +ZK_HOST="localhost:2181" +SOLR_HOST="http://localhost:8983" +SOLR_USERNAME=solr +SOLR_PASSWORD=SolrRocks +SPEEDBUMP_HOST="http://localhost:8984" \ No newline at end of file diff --git a/test_setup/test_collection/managed-schema b/test_setup/test_collection/managed-schema new file mode 100644 index 0000000..209cb04 --- /dev/null +++ b/test_setup/test_collection/managed-schema @@ -0,0 +1,38 @@ + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test_setup/test_collection/protwords.txt b/test_setup/test_collection/protwords.txt new file mode 100644 index 0000000..e69de29 diff --git a/test_setup/test_collection/solrconfig.xml b/test_setup/test_collection/solrconfig.xml new file mode 100644 index 0000000..81215db --- /dev/null +++ b/test_setup/test_collection/solrconfig.xml @@ -0,0 +1,66 @@ + + + + 8.11.1 + ${solr.data.dir:} + + + + ${solr.lock.type:native} + + + + + ${solr.ulog.dir:} + ${solr.ulog.numVersionBuckets:65536} + + + + ${solr.autoCommit.maxTime:15000} + false + + + + ${solr.autoSoftCommit.maxTime:-1} + + + + + ${solr.max.booleanClauses:1024} + + + + 20 + 200 + + + + + + + 
+ + + + explicit + 10 + + + + + + _text_ + + + + + + + \ No newline at end of file diff --git a/test_setup/test_collection/stopwords.txt b/test_setup/test_collection/stopwords.txt new file mode 100644 index 0000000..e69de29 diff --git a/test_setup/test_collection/synonyms.txt b/test_setup/test_collection/synonyms.txt new file mode 100644 index 0000000..e69de29 diff --git a/test_setup/test_data.json b/test_setup/test_data.json new file mode 100644 index 0000000..c792b08 --- /dev/null +++ b/test_setup/test_data.json @@ -0,0 +1,44 @@ +[ + { + "id": "city_Alta", + "city_name": "Alta", + "population": [ + { + "id": "city_Alta_20", + "age": 20, + "count": 100 + }, + { + "id": "city_Alta_40", + "age": 40, + "count": 50 + }, + { + "id": "city_Alta_60", + "age": 60, + "count": 50 + } + ] + }, + { + "id": "city_Tromsø", + "city_name": "Tromsø", + "population": [ + { + "id": "city_Tromsø_20", + "age": 20, + "count": 400 + }, + { + "id": "city_Tromsø_40", + "age": 40, + "count": 200 + }, + { + "id": "city_Tromsø_60", + "age": 60, + "count": 200 + } + ] + } +] \ No newline at end of file diff --git a/wrappers/python/.gitignore b/wrappers/python/.gitignore new file mode 100644 index 0000000..5985622 --- /dev/null +++ b/wrappers/python/.gitignore @@ -0,0 +1,75 @@ +/target + +# Byte-compiled / optimized / DLL files +__pycache__/ +.pytest_cache/ +*.py[cod] + +# C extensions +*.so + +# Distribution / packaging +.Python +.venv/ +env/ +bin/ +build/ +develop-eggs/ +dist/ +eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +include/ +man/ +venv/ +*.egg-info/ +.installed.cfg +*.egg + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt +pip-selfcheck.json + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.cache +nosetests.xml +coverage.xml + +# Translations +*.mo + +# Mr Developer +.mr.developer.cfg +.project +.pydevproject + +# Rope +.ropeproject + +# Django stuff: +*.log +*.pot + +.DS_Store + +# Sphinx documentation +docs/_build/ + +# PyCharm +.idea/ + +# VSCode +.vscode/ + +# Pyenv +.python-version + +docs/ +Cargo.lock \ No newline at end of file diff --git a/wrappers/python/Cargo.toml b/wrappers/python/Cargo.toml new file mode 100644 index 0000000..56fd90b --- /dev/null +++ b/wrappers/python/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "solrstice-py" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[lib] +name = "solrstice" +crate-type = ["cdylib"] + +[dependencies] +pyo3 = { version = "0.19", features = ["extension-module", "serde", "abi3-py38"] } +pyo3-asyncio = { version = "0.19", features = ["attributes", "tokio-runtime"] } +pythonize = "0.19" +solrstice = { path = "../../framework", features = ["blocking"] } +serde = "1.0" +serde_json = "1.0" +reqwest = "0.11" +async-trait = "0.1" +# Needed for manylinux +openssl = {version = "0.10", features = ["vendored"]} \ No newline at end of file diff --git a/wrappers/python/LICENSE-APACHE b/wrappers/python/LICENSE-APACHE new file mode 100644 index 0000000..892cc89 --- /dev/null +++ b/wrappers/python/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 Andreas H Johansen + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/wrappers/python/LICENSE-MIT b/wrappers/python/LICENSE-MIT new file mode 100644 index 0000000..0dc6b72 --- /dev/null +++ b/wrappers/python/LICENSE-MIT @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Andreas H Johansen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/wrappers/python/README.md b/wrappers/python/README.md new file mode 100644 index 0000000..18eee7b --- /dev/null +++ b/wrappers/python/README.md @@ -0,0 +1,64 @@ +# Solrstice Python Wrapper +Solrstice is a solr 8+ cloud aware client library written in rust. With this wrapper, you can use it in python. + +Both asyncio and blocking clients are provided. All apis have type hints. +## Installation +```bash +pip install solrstice +``` +## Basic Usage +### Async +```python +import asyncio +from solrstice.clients import AsyncSolrCloudClient +from solrstice.hosts import SolrSingleServerHost, SolrServerContext +from solrstice.auth import SolrBasicAuth +from solrstice.queries import UpdateQueryBuilder, SelectQueryBuilder, DeleteQueryBuilder + +# A SolrServerContext specifies how the library should interact with Solr +context = SolrServerContext(SolrSingleServerHost('localhost:8983'), SolrBasicAuth('solr', 'SolrRocks')) +client = AsyncSolrCloudClient(context) + +async def main(): + # Create config and collection + await client.upload_config('example_config', 'path/to/config') + await client.create_collection('example_collection', 'example_config', shards=1, replication_factor=1) + + # Index a document + await client.index(UpdateQueryBuilder(), 'example_collection', [{'id': 'example_document', 'title': 'Example document'}]) + + # Search for the document + response = await client.select(SelectQueryBuilder(fq=['title:Example document']), 'example_collection') + docs = response.get_response().docs + + # Delete the document + await client.delete(DeleteQueryBuilder(ids=['example_document']), 'example_collection') + + +asyncio.run(main()) +``` +### Blocking +```python +from solrstice.clients import BlockingSolrCloudClient +from solrstice.hosts import SolrSingleServerHost, SolrServerContext +from solrstice.auth import SolrBasicAuth +from solrstice.queries import UpdateQueryBuilder, SelectQueryBuilder, DeleteQueryBuilder + +# A SolrServerContext specifies how the library should interact with Solr +context = SolrServerContext(SolrSingleServerHost('localhost:8983'), SolrBasicAuth('solr', 'SolrRocks')) +client = BlockingSolrCloudClient(context) + +# Create config and collection +client.upload_config('example_config', 'path/to/config') 
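+# Note: 'path/to/config' is a placeholder; point it at a Solr configset directory
+# (one containing solrconfig.xml and a managed-schema, e.g. test_setup/test_collection in this repository).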
+client.create_collection('example_collection', 'example_config', shards=1, replication_factor=1) + +# Index a document +client.index(UpdateQueryBuilder(), 'example_collection', [{'id': 'example_document', 'title': 'Example document'}]) + +# Search for the document +response = client.select(SelectQueryBuilder(fq=['title:Example document']), 'example_collection') +docs = response.get_response().docs +``` + +## Notes +* Multiprocessing does not work, and will block forever. Normal multithreading works fine. \ No newline at end of file diff --git a/wrappers/python/generate_documentation.py b/wrappers/python/generate_documentation.py new file mode 100755 index 0000000..8f1ec2a --- /dev/null +++ b/wrappers/python/generate_documentation.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import os +import shutil +import tempfile +from pathlib import Path + +import pdoc + +if __name__ == '__main__': + tmpdir = tempfile.TemporaryDirectory() + package_name = 'solrstice' + package_path = os.path.join(tmpdir.name, package_name) + current_directory = os.path.dirname(os.path.realpath(__file__)) + docs_directory = Path(os.path.join(Path(current_directory), 'docs')) + + os.mkdir(package_path) + for filename in os.listdir(package_name): + f = os.path.join(package_name, filename) + if os.path.isfile(f) and filename.endswith('.pyi'): + shutil.copyfile(f, os.path.join(package_path, filename[:-1])) + shutil.copyfile('README.md', os.path.join(tmpdir.name, 'README.md')) + with open(os.path.join(package_path, '__init__.py'), 'w') as f: + f.write(''' + """ + .. include:: ../README.md + """ + ''') + + pdoc.pdoc(os.path.join(tmpdir.name, package_name), output_directory=docs_directory) diff --git a/wrappers/python/pyproject.toml b/wrappers/python/pyproject.toml new file mode 100644 index 0000000..ed63ee5 --- /dev/null +++ b/wrappers/python/pyproject.toml @@ -0,0 +1,20 @@ +[build-system] +requires = ["maturin>=1.0,<2.0"] +build-backend = "maturin" + +[project] +name = "solrstice" +requires-python = ">=3.8" +readme = "README.md" +license = "MIT OR Apache-2.0" +description = "A Solr client library written in Rust" +keywords = ["solr", "rust", "search"] +classifiers = [ + "Programming Language :: Rust", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", +] + + +[tool.maturin] +features = ["pyo3/extension-module"] diff --git a/wrappers/python/pytest.ini b/wrappers/python/pytest.ini new file mode 100644 index 0000000..65eeff2 --- /dev/null +++ b/wrappers/python/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +addopts = --doctest-modules --doctest-glob='*.pyi' +doctest_optionflags = NORMALIZE_WHITESPACE \ No newline at end of file diff --git a/wrappers/python/requirements-dev.txt b/wrappers/python/requirements-dev.txt new file mode 100644 index 0000000..a670fad --- /dev/null +++ b/wrappers/python/requirements-dev.txt @@ -0,0 +1,6 @@ +pytest +pytest-asyncio +python-dotenv +maturin +dataclasses-json +pdoc \ No newline at end of file diff --git a/wrappers/python/solrstice/__init__.py b/wrappers/python/solrstice/__init__.py new file mode 100644 index 0000000..08a3ff9 --- /dev/null +++ b/wrappers/python/solrstice/__init__.py @@ -0,0 +1 @@ +from .solrstice import * diff --git a/wrappers/python/solrstice/alias.pyi b/wrappers/python/solrstice/alias.pyi new file mode 100644 index 0000000..00f6aed --- /dev/null +++ b/wrappers/python/solrstice/alias.pyi @@ -0,0 +1,79 @@ +from typing import List, Dict + +from solrstice.hosts import SolrServerContext + 
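+# Usage sketch (comments only, not executed): manage aliases with the coroutines below,
+# or with their *_blocking counterparts when not running inside an event loop.
+# Assumes a reachable SolrCloud node and an existing 'example_collection';
+# SolrSingleServerHost and SolrBasicAuth come from solrstice.hosts and solrstice.auth.
+#
+#   context = SolrServerContext(SolrSingleServerHost('localhost:8983'), SolrBasicAuth('solr', 'SolrRocks'))
+#   await create_alias(context, 'example_alias', ['example_collection'])
+#   assert await alias_exists(context, 'example_alias')
+#   aliases = await get_aliases(context)  # e.g. {'example_alias': ['example_collection']}
+#   await delete_alias(context, 'example_alias')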
+ +async def create_alias(context: SolrServerContext, name: str, collections: List[str]) -> None: + """ + Create an alias for a collection on the Solr server + + :param context: The Solr server context + :param name: The name of the alias to create + :param collections: The collections to alias + """ + + +def create_alias_blocking(context: SolrServerContext, name: str, collections: List[str]) -> None: + """ + Create an alias for a collection on the Solr server + + :param context: The Solr server context + :param name: The name of the alias to create + :param collections: The collections to alias + """ + + +async def get_aliases(context: SolrServerContext) -> Dict[str, List[str]]: + """ + Get all aliases on the Solr server + + :param context: The Solr server context + :return: A dictionary of aliases to collections + """ + + +def get_aliases_blocking(context: SolrServerContext) -> Dict[str, List[str]]: + """ + Get all aliases on the Solr server + + :param context: The Solr server context + :return: A dictionary of aliases to collections + """ + + +async def alias_exists(context: SolrServerContext, name: str) -> bool: + """ + Check if an alias exists on the Solr server + + :param context: The Solr server context + :param name: The name of the alias to check + :return: True if the alias exists, False otherwise + """ + + +def alias_exists_blocking(context: SolrServerContext, name: str) -> bool: + """ + Check if an alias exists on the Solr server + + :param context: The Solr server context + :param name: The name of the alias to check + :return: True if the alias exists, False otherwise + """ + + +async def delete_alias(context: SolrServerContext, name: str) -> None: + """ + Delete an alias from the Solr server + + :param context: The Solr server context + :param name: The name of the alias to delete + """ + + +def delete_alias_blocking(context: SolrServerContext, name: str) -> None: + """ + Delete an alias from the Solr server + + :param context: The Solr server context + :param name: The name of the alias to delete + """ diff --git a/wrappers/python/solrstice/auth.pyi b/wrappers/python/solrstice/auth.pyi new file mode 100644 index 0000000..40ffd2e --- /dev/null +++ b/wrappers/python/solrstice/auth.pyi @@ -0,0 +1,17 @@ +from abc import ABC +from typing import Optional + + +class SolrAuth(ABC): + """Base class for Solr authentication""" + + +class SolrBasicAuth(SolrAuth): + """Basic authentication for Solr + + :param username: Username for Solr + :param password: Password for Solr + """ + + def __init__(self, username: str, password: Optional[str] = None) -> None: + pass diff --git a/wrappers/python/solrstice/clients.pyi b/wrappers/python/solrstice/clients.pyi new file mode 100644 index 0000000..1919f9a --- /dev/null +++ b/wrappers/python/solrstice/clients.pyi @@ -0,0 +1,254 @@ +from os import PathLike +from typing import TYPE_CHECKING, Union, List, Optional, Dict, Any + +if TYPE_CHECKING: + from solrstice.hosts import SolrServerContext + from solrstice.queries import SelectQueryBuilder, UpdateQueryBuilder, DeleteQueryBuilder + from solrstice.response import SolrResponse + +class AsyncSolrCloudClient: + """ + A client for interacting with a SolrCloud cluster asynchronously. + + :param context: The context of the Solr server. 
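+
+    Illustrative sketch (assumes a reachable SolrCloud node and an existing collection; see the package README):
+
+        context = SolrServerContext(SolrSingleServerHost('localhost:8983'), SolrBasicAuth('solr', 'SolrRocks'))
+        client = AsyncSolrCloudClient(context)
+        # inside an async function:
+        response = await client.select(SelectQueryBuilder(fq=['title:Example document']), 'example_collection')
+        docs = response.get_response().docs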
+ """ + def __init__(self, context: 'SolrServerContext'): + pass + + async def upload_config(self, config_name: str, config_path: Union[PathLike[str], str]) -> None: + """Uploads a Solr config to a Solr instance + + :param config_name: Name of the config + :param config_path: Path to the config + """ + pass + + async def get_configs(self) -> List[str]: + """Gets a list of Solr configs on a Solr instance + + :param context: SolrServerRequest context + """ + pass + + async def config_exists(self, config_name: str) -> bool: + """Checks if a Solr config exists on a Solr instance + + :param config_name: Name of the config + """ + pass + + async def delete_config(self, config_name: str) -> None: + """Deletes a Solr config from a Solr instance + + :param config_name: Name of the config + """ + pass + + async def create_collection(self, name: str, config: str, shards: Optional[int] = 1, replication_factor: Optional[int] = 1) -> None: + """ + Create a collection on the Solr server. + + :param name: The name of the collection to create. + :param config: The name of the config to use for the collection. + :param shards: The number of shards to create. + :param replication_factor: The replication factor to use. + """ + + async def get_collections(self) -> List[str]: + """ + Get the list of collections on the Solr server. + + :return: The list of collections on the Solr server. + """ + + async def collection_exists(self, name: str) -> bool: + """ + Check if a collection exists on the Solr server. + + :param name: The name of the collection to check. + :return: True if the collection exists, False otherwise. + """ + + async def delete_collection(self, name: str) -> None: + """ + Delete a config from the Solr server. + + :param context: The Solr server context. + :param name: The name of the collection to delete. + """ + + async def create_alias(self, name: str, collections: List[str]) -> None: + """ + Create an alias for a collection on the Solr server + + :param name: The name of the alias to create + :param collections: The collections to alias + """ + + async def get_aliases(self) -> Dict[str, List[str]]: + """ + Get all aliases on the Solr server + + :return: A dictionary of aliases to collections + """ + + async def alias_exists(self, name: str) -> bool: + """ + Check if an alias exists on the Solr server + + :param name: The name of the alias to check + :return: True if the alias exists, False otherwise + """ + + async def delete_alias(self, name: str) -> None: + """ + Delete an alias from the Solr server + + :param name: The name of the alias to delete + """ + + async def select(self, builder: 'SelectQueryBuilder', collection: str) -> 'SolrResponse': + """Execute a select query + + :param builder: The query builder + :param collection: The collection to query + """ + + async def index(self, builder: 'UpdateQueryBuilder', collection: str, data: List[Dict[str, Any]]) -> 'SolrResponse': + """Execute an index query + + :param builder: The query builder + :param collection: The collection to index + :param data: The data to index + """ + + async def delete(self, builder: 'DeleteQueryBuilder', collection: str) -> 'SolrResponse': + """Execute a delete query + + :param builder: The query builder + :param collection: The collection to delete from + """ + + +class BlockingSolrCloudClient: + """ + A client for interacting with a SolrCloud cluster non-asynchronously. + + :param context: The context of the Solr server. 
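+
+    Illustrative sketch (assumes a reachable SolrCloud node and an existing collection; see the package README):
+
+        context = SolrServerContext(SolrSingleServerHost('localhost:8983'), SolrBasicAuth('solr', 'SolrRocks'))
+        client = BlockingSolrCloudClient(context)
+        response = client.select(SelectQueryBuilder(fq=['title:Example document']), 'example_collection')
+        docs = response.get_response().docs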
+ """ + def __init__(self, context: 'SolrServerContext'): + pass + + def upload_config(self, config_name: str, config_path: Union[PathLike[str], str]) -> None: + """Uploads a Solr config to a Solr instance + + :param config_name: Name of the config + :param config_path: Path to the config + """ + pass + + def get_configs(self) -> List[str]: + """Gets a list of Solr configs on a Solr instance + + :param context: SolrServerRequest context + """ + pass + + def config_exists(self, config_name: str) -> bool: + """Checks if a Solr config exists on a Solr instance + + :param config_name: Name of the config + """ + pass + + def delete_config(self, config_name: str) -> None: + """Deletes a Solr config from a Solr instance + + :param config_name: Name of the config + """ + pass + + def create_collection(self, name: str, config: str, shards: Optional[int] = 1, replication_factor: Optional[int] = 1) -> None: + """ + Create a collection on the Solr server. + + :param name: The name of the collection to create. + :param config: The name of the config to use for the collection. + :param shards: The number of shards to create. + :param replication_factor: The replication factor to use. + """ + + def get_collections(self) -> List[str]: + """ + Get the list of collections on the Solr server. + + :return: The list of collections on the Solr server. + """ + + def collection_exists(self, name: str) -> bool: + """ + Check if a collection exists on the Solr server. + + :param name: The name of the collection to check. + :return: True if the collection exists, False otherwise. + """ + + def delete_collection(self, name: str) -> None: + """ + Delete a config from the Solr server. + + :param context: The Solr server context. + :param name: The name of the collection to delete. 
+ """ + + def create_alias(self, name: str, collections: List[str]) -> None: + """ + Create an alias for a collection on the Solr server + + :param name: The name of the alias to create + :param collections: The collections to alias + """ + + def get_aliases(self) -> Dict[str, List[str]]: + """ + Get all aliases on the Solr server + + :return: A dictionary of aliases to collections + """ + + def alias_exists(self, name: str) -> bool: + """ + Check if an alias exists on the Solr server + + :param name: The name of the alias to check + :return: True if the alias exists, False otherwise + """ + + def delete_alias(self, name: str) -> None: + """ + Delete an alias from the Solr server + + :param name: The name of the alias to delete + """ + + def select(self, builder: 'SelectQueryBuilder', collection: str) -> 'SolrResponse': + """Execute a select query + + :param builder: The query builder + :param collection: The collection to query + """ + + def index(self, builder: 'UpdateQueryBuilder', collection: str, data: List[Dict[str, Any]]) -> 'SolrResponse': + """Execute an index query + + :param builder: The query builder + :param collection: The collection to index + :param data: The data to index + """ + + def delete(self, builder: 'DeleteQueryBuilder', collection: str) -> 'SolrResponse': + """Execute a delete query + + :param builder: The query builder + :param collection: The collection to delete from + """ \ No newline at end of file diff --git a/wrappers/python/solrstice/collection.pyi b/wrappers/python/solrstice/collection.pyi new file mode 100644 index 0000000..04a9ab2 --- /dev/null +++ b/wrappers/python/solrstice/collection.pyi @@ -0,0 +1,76 @@ +from typing import List, Optional + +from solrstice.hosts import SolrServerContext + +async def create_collection(context: SolrServerContext, name: str, config: str, shards: Optional[int] = 1, replication_factor: Optional[int] = 1) -> None: + """ + Create a collection on the Solr server. + + :param context: The Solr server context. + :param name: The name of the collection to create. + :param config: The name of the config to use for the collection. + :param shards: The number of shards to create. + :param replication_factor: The replication factor to use. + """ + + +def create_collection_blocking(context: SolrServerContext, name: str, config: str, shards: Optional[int] = 1, replication_factor: Optional[int] = 1) -> None: + """ + Create a collection on the Solr server. + + :param context: The Solr server context. + :param name: The name of the collection to create. + :param config: The name of the config to use for the collection. + :param shards: The number of shards to create. + :param replication_factor: The replication factor to use. + """ + +async def get_collections(context: SolrServerContext) -> List[str]: + """ + Get the list of collections on the Solr server. + + :param context: The Solr server context. + :return: The list of collections on the Solr server. + """ + +def get_collections_blocking(context: SolrServerContext) -> List[str]: + """ + Get the list of collections on the Solr server. + + :param context: The Solr server context. + :return: The list of collections on the Solr server. + """ + +async def collection_exists(context: SolrServerContext, name: str) -> bool: + """ + Check if a collection exists on the Solr server. + + :param context: The Solr server context. + :param name: The name of the collection to check. + :return: True if the collection exists, False otherwise. 
+ """ + +def collection_exists_blocking(context: SolrServerContext, name: str) -> bool: + """ + Check if a collection exists on the Solr server. + + :param context: The Solr server context. + :param name: The name of the collection to check. + :return: True if the collection exists, False otherwise. + """ + +async def delete_collection(context: SolrServerContext, name: str) -> None: + """ + Delete a config from the Solr server. + + :param context: The Solr server context. + :param name: The name of the collection to delete. + """ + +def delete_collection_blocking(context: SolrServerContext, name: str) -> None: + """ + Delete a config from the Solr server. + + :param context: The Solr server context. + :param name: The name of the collection to delete. + """ \ No newline at end of file diff --git a/wrappers/python/solrstice/config.pyi b/wrappers/python/solrstice/config.pyi new file mode 100644 index 0000000..1cf4053 --- /dev/null +++ b/wrappers/python/solrstice/config.pyi @@ -0,0 +1,68 @@ +from os import PathLike + +from solrstice.hosts import SolrServerContext +from typing import List, Union + +async def upload_config(context: SolrServerContext, config_name: str, config_path: Union[PathLike[str], str]) -> None: + """Uploads a Solr config to a Solr instance + + :param context: SolrServerRequest context + :param config_name: Name of the config + :param config_path: Path to the config + """ + pass + +def upload_config_blocking(context: SolrServerContext, config_name: str, config_path: Union[PathLike[str], str]) -> None: + """Uploads a Solr config to a Solr instance + + :param context: SolrServerRequest context + :param config_name: Name of the config + :param config_path: Path to the config + """ + pass + +async def delete_config(context: SolrServerContext, config_name: str) -> None: + """Deletes a Solr config from a Solr instance + + :param context: SolrServerRequest context + :param config_name: Name of the config + """ + pass + +def delete_config_blocking(context: SolrServerContext, config_name: str) -> None: + """Deletes a Solr config from a Solr instance + + :param context: SolrServerRequest context + :param config_name: Name of the config + """ + pass + +async def config_exists(context: SolrServerContext, config_name: str) -> bool: + """Checks if a Solr config exists on a Solr instance + + :param context: SolrServerRequest context + :param config_name: Name of the config + """ + pass + +def config_exists_blocking(context: SolrServerContext, config_name: str) -> bool: + """Checks if a Solr config exists on a Solr instance + + :param context: SolrServerRequest context + :param config_name: Name of the config + """ + pass + +async def get_configs(context: SolrServerContext) -> List[str]: + """Gets a list of Solr configs on a Solr instance + + :param context: SolrServerRequest context + """ + pass + +def get_configs_blocking(context: SolrServerContext) -> List[str]: + """Gets a list of Solr configs on a Solr instance + + :param context: SolrServerRequest builder + """ + pass \ No newline at end of file diff --git a/wrappers/python/solrstice/group.pyi b/wrappers/python/solrstice/group.pyi new file mode 100644 index 0000000..33f18d2 --- /dev/null +++ b/wrappers/python/solrstice/group.pyi @@ -0,0 +1,84 @@ +from enum import Enum +from typing import List + +from typing import Any, Optional + +from solrstice.response import SolrDocsResponse + + +class GroupFormatting(Enum): + Simple = "Simple" + Grouped = "Grouped" + + +class GroupingComponent: + """ + Grouping component, used in conjunction with 
SelectQueryBuilder + + :param fields: Fields to group results by + :param queries: Queries to group by + :param limit: Limit the number of groups returned for each set of grouped documents + :param offset: Offset the number of groups returned for each set of grouped documents + :param sort: Sort the groups + :param format: The group format, either Simple, or Grouped + :param main: Should the group result be the main result + :param n_groups: Should the number of groups be counted + :param truncate: Truncate + :param facet: Facet + """ + def __init__(self, fields: Optional[List[str]] = None, queries: Optional[List[str]] = None, + limit: Optional[int] = None, offset: Optional[int] = None, sort: Optional[List[str]] = None, + format: Optional[GroupFormatting] = None, main: Optional[bool] = None, + n_groups: Optional[bool] = None, truncate: Optional[bool] = None, facet: Optional[bool] = None): + pass + + fields: Optional[List[str]] + queries: Optional[List[str]] + limit: Optional[int] + offset: Optional[int] + sort: Optional[List[str]] + format: Optional[GroupFormatting] + main: Optional[bool] + n_groups: Optional[bool] + truncate: Optional[bool] + facet: Optional[bool] + + +class SolrGroupFieldResult: + """ + Represents a group field result + """ + group_value: Any + doc_list: SolrDocsResponse + + +class SolrGroupResult: + """ + Represents a group result + """ + matches: int + n_groups: Optional[int] + + def get_field_result(self) -> List[SolrGroupFieldResult]: + """ + Gets the field results form a group query + :return: List of group field results + + :raises: RuntimeError if conversion failed, or no field result existed + """ + + def get_query_result(self) -> SolrDocsResponse: + """ + Gets the query result from a group query + :return: Query result + + :raises: RuntimeError if conversion failed, or no query result existed + """ + + def get_simple_result(self) -> SolrDocsResponse: + """ + Gets the result from a group query where `GroupFormatting.Simple` was used + :return: Simple result + + :raises: RuntimeError if conversion failed, or no simple result existed + """ diff --git a/wrappers/python/solrstice/hosts.pyi b/wrappers/python/solrstice/hosts.pyi new file mode 100644 index 0000000..ab0e831 --- /dev/null +++ b/wrappers/python/solrstice/hosts.pyi @@ -0,0 +1,59 @@ +from abc import ABC +from typing import List, Optional + +from solrstice.auth import SolrAuth + + +class SolrHost(ABC): + """Base class for Solr hosts""" + + +class SolrSingleServerHost(SolrHost): + """Solr host for a single Solr instance + + :param host: Hostname of the Solr instance + """ + + def __init__(self, host: str) -> None: + pass + + +class SolrMultipleServerHost(SolrHost): + """Solr host for multiple solr instances + + :param hosts: List of Solr instances + :param timeout: Amount of seconds before declaring a node not responding, and going to the next + """ + + def __init__(self, host: List[str], timeout: float) -> None: + pass + +class ZookeeperEnsembleHost(SolrHost): + """Zookeeper ensemble connection. 
Cannot be constructed directly, use ZookeeperEnsembleHostConnector instead""" + +class ZookeeperEnsembleHostConnector: + """ The builder for a Zookeeper ensemble host + + :param hosts: List of Zookeeper instances + :param timeout: Timeout for connecting to Zookeeper + """ + def __init__(self, hosts: List[str], timeout: float) -> None: + pass + + async def connect(self) -> ZookeeperEnsembleHost: + """Connect to the Zookeeper ensemble""" + pass + + def connect_blocking(self) -> ZookeeperEnsembleHost: + """Connect to the Zookeeper ensemble""" + pass + +class SolrServerContext: + """The context for a connection to a solr instance + + :param host: An instance of SolrHost specifying how to connect to a solr instance + :param auth: An instance of SolrAuth specifying how to authenticate with the solr instance + """ + + def __init__(self, host: SolrHost, auth: Optional[SolrAuth] = None): + pass \ No newline at end of file diff --git a/wrappers/python/solrstice/py.typed b/wrappers/python/solrstice/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/wrappers/python/solrstice/queries.pyi b/wrappers/python/solrstice/queries.pyi new file mode 100644 index 0000000..1ee95d4 --- /dev/null +++ b/wrappers/python/solrstice/queries.pyi @@ -0,0 +1,107 @@ +from enum import Enum +from typing import List, Dict, Optional, TYPE_CHECKING + +if TYPE_CHECKING: + from solrstice.group import GroupingComponent + from solrstice.hosts import SolrServerContext + from solrstice.response import SolrResponse + + +class SelectQueryBuilder: + """Builder for a select query + + :param q: The query string + :param fq: The filter queries + :param fl: The fields to return + :param sort: The sort order + :param rows: The number of rows to return + :param start: Set the start index + :param cursor_mark: Set the cursor mark + :param grouping: Set the grouping component + """ + def __init__(self, q: Optional[str] = None, fq: Optional[List[str]] = None, fl: Optional[List[str]] = None, sort: Optional[List[str]] = None, rows: Optional[int] = None, start: Optional[int] = None, cursor_mark: Optional[str] = None, grouping: Optional['GroupingComponent'] = None) -> None: + pass + + q: str + fq: Optional[List[str]] + fl: Optional[List[str]] + sort: Optional[List[str]] + rows: int + start: int + cursor_mark: str + grouping: Optional['GroupingComponent'] + + + async def execute(self, context: 'SolrServerContext', collection: str) -> 'SolrResponse': + """Execute the query + + :param context: The context for the connection to the solr instance + :param collection: The collection to query + """ + + def execute_blocking(self, context: 'SolrServerContext', collection: str) -> 'SolrResponse': + """Execute the query + + :param context: The context for the connection to the solr instance + :param collection: The collection to query + """ + +class CommitType(Enum): + Hard = "Hard", + Soft = "Soft" + +class UpdateQueryBuilder: + """Builder for an update query + + :param handler: The handler for the update query + :param commit_type: The commit type for the update query + """ + def __init__(self, handler: Optional[str] = 'update', commit_type: Optional[CommitType] = CommitType.Hard) -> None: + pass + + handler: str + commit_type: CommitType + + async def execute(self, context: 'SolrServerContext', collection: str, data: List[Dict]) -> 'SolrResponse': + """Execute the query + + :param context: The context for the connection to the solr instance + :param collection: The collection to update + :param data: The data to update + """ + + def 
execute_blocking(self, context: 'SolrServerContext', collection: str, data: List[Dict]) -> 'SolrResponse': + """Execute the query + + :param context: The context for the connection to the solr instance + :param collection: The collection to update + :param data: The data to update + """ + +class DeleteQueryBuilder: + """Builder for a delete query + + :param handler: The handler for the delete query + :param commit_type: The commit type for the delete query + """ + def __init__(self, handler: Optional[str] = 'update', commit_type: Optional[CommitType] = CommitType.Hard, ids: Optional[List[str]] = None, queries: Optional[List[str]] = None) -> None: + pass + + handler: str + commit_type: CommitType + ids: List[str] + queries: List[str] + + async def execute(self, context: 'SolrServerContext', collection: str) -> 'SolrResponse': + """Execute the query + + :param context: The context for the connection to the solr instance + :param collection: The collection to delete from + """ + + def execute_blocking(self, context: 'SolrServerContext', collection: str) -> 'SolrResponse': + """Execute the query + + :param context: The context for the connection to the solr instance + :param collection: The collection to delete from + """ \ No newline at end of file diff --git a/wrappers/python/solrstice/response.pyi b/wrappers/python/solrstice/response.pyi new file mode 100644 index 0000000..3ccae5d --- /dev/null +++ b/wrappers/python/solrstice/response.pyi @@ -0,0 +1,28 @@ +from typing import List, Dict, Any, Optional, TYPE_CHECKING + +if TYPE_CHECKING: + from solrstice.group import SolrGroupResult + + +class SolrDocsResponse: + num_found: int + start: int + num_found_exact: bool + docs: List[Dict[str, Any]] + +class SolrResponse: + """The response from a solr query""" + + next_cursor_mark: Optional[str] + + def get_response(self) -> SolrDocsResponse: + """Get the response from a solr query + + :raises RuntimeError if no response in query + """ + + def get_groups(self) -> Dict[str, 'SolrGroupResult']: + """Get the groups from a solr query + + :raises RuntimeError if no groups in query + """ \ No newline at end of file diff --git a/wrappers/python/src/clients.rs b/wrappers/python/src/clients.rs new file mode 100644 index 0000000..35650b6 --- /dev/null +++ b/wrappers/python/src/clients.rs @@ -0,0 +1,251 @@ +use crate::models::context::SolrServerContextWrapper; +use crate::models::response::SolrResponseWrapper; +use crate::queries::alias::{ + alias_exists, alias_exists_blocking, create_alias, create_alias_blocking, + delete_alias_blocking, get_aliases, get_aliases_blocking, +}; +use crate::queries::collection::{ + collection_exists, collection_exists_blocking, create_collection, create_collection_blocking, + delete_collection, delete_collection_blocking, get_collections, get_collections_blocking, +}; +use crate::queries::config::{ + config_exists, config_exists_blocking, delete_config, delete_config_blocking, get_configs, + get_configs_blocking, upload_config, upload_config_blocking, +}; +use crate::queries::index::{DeleteQueryBuilderWrapper, UpdateQueryBuilderWrapper}; +use crate::queries::select::SelectQueryBuilderWrapper; +use pyo3::prelude::*; +use std::collections::HashMap; +use std::path::PathBuf; + +#[pymodule] +pub fn clients(_py: Python<'_>, m: &PyModule) -> PyResult<()> { + m.add_class::()?; + m.add_class::()?; + Ok(()) +} + +#[pyclass(name = "AsyncSolrCloudClient", module = "solrstice.clients")] +#[derive(Clone)] +pub struct AsyncSolrCloudClientWrapper(SolrServerContextWrapper); + +#[pymethods] +impl 
AsyncSolrCloudClientWrapper { + #[new] + fn new(context: SolrServerContextWrapper) -> Self { + AsyncSolrCloudClientWrapper(context) + } + + pub fn upload_config<'a>( + &self, + py: Python<'a>, + name: String, + path: PathBuf, + ) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + upload_config(py, context, name, path) + } + + pub fn get_configs<'a>(&self, py: Python<'a>) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + get_configs(py, context) + } + + pub fn config_exists<'a>(&self, py: Python<'a>, name: String) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + config_exists(py, context, name) + } + + pub fn delete_config<'a>(&self, py: Python<'a>, name: String) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + delete_config(py, context, name) + } + + pub fn create_collection<'a>( + &self, + py: Python<'a>, + name: String, + config: String, + shards: Option, + replication_factor: Option, + ) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + create_collection(py, context, name, config, shards, replication_factor) + } + + pub fn get_collections<'a>(&self, py: Python<'a>) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + get_collections(py, context) + } + + pub fn collection_exists<'a>(&self, py: Python<'a>, name: String) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + collection_exists(py, context, name) + } + + pub fn delete_collection<'a>(&self, py: Python<'a>, name: String) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + delete_collection(py, context, name) + } + + pub fn get_aliases<'a>(&self, py: Python<'a>) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + get_aliases(py, context) + } + + pub fn create_alias<'a>( + &self, + py: Python<'a>, + name: String, + collections: Vec, + ) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + create_alias(py, context, name, collections) + } + + pub fn alias_exists<'a>(&self, py: Python<'a>, name: String) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + alias_exists(py, context, name) + } + + pub fn index<'a>( + &self, + py: Python<'a>, + builder: UpdateQueryBuilderWrapper, + collection: String, + data: Vec, + ) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + builder.execute(py, context, collection, data) + } + + pub fn select<'a>( + &self, + py: Python<'a>, + builder: &SelectQueryBuilderWrapper, + collection: String, + ) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + builder.execute(py, context, collection) + } + + pub fn delete<'a>( + &self, + py: Python<'a>, + builder: &DeleteQueryBuilderWrapper, + collection: String, + ) -> PyResult<&'a PyAny> { + let context = self.0.clone(); + builder.execute(py, context, collection) + } +} + +#[pyclass(name = "BlockingSolrCloudClient", module = "solrstice.clients")] +#[derive(Clone)] +pub struct BlockingSolrCloudClientWrapper(SolrServerContextWrapper); + +#[pymethods] +impl BlockingSolrCloudClientWrapper { + #[new] + fn new(context: SolrServerContextWrapper) -> Self { + BlockingSolrCloudClientWrapper(context) + } + + pub fn upload_config(&self, py: Python, name: String, path: PathBuf) -> PyResult<()> { + let context = self.0.clone(); + upload_config_blocking(py, context, name, path) + } + + pub fn get_configs(&self, py: Python) -> PyResult> { + let context = self.0.clone(); + get_configs_blocking(py, context) + } + + pub fn config_exists(&self, py: Python, name: String) -> PyResult { + let context = self.0.clone(); + config_exists_blocking(py, context, name) + } + + pub fn 
delete_config(&self, py: Python, name: String) -> PyResult<()> { + let context = self.0.clone(); + delete_config_blocking(py, context, name) + } + + pub fn create_collection( + &self, + py: Python, + name: String, + config: String, + shards: Option, + replication_factor: Option, + ) -> PyResult<()> { + let context = self.0.clone(); + create_collection_blocking(py, context, name, config, shards, replication_factor) + } + + pub fn get_collections(&self, py: Python) -> PyResult> { + let context = self.0.clone(); + get_collections_blocking(py, context) + } + + pub fn collection_exists(&self, py: Python, name: String) -> PyResult { + let context = self.0.clone(); + collection_exists_blocking(py, context, name) + } + + pub fn delete_collection(&self, py: Python, name: String) -> PyResult<()> { + let context = self.0.clone(); + delete_collection_blocking(py, context, name) + } + + pub fn create_alias(&self, py: Python, name: String, collections: Vec) -> PyResult<()> { + let context = self.0.clone(); + create_alias_blocking(py, context, name, collections) + } + + pub fn get_aliases(&self, py: Python) -> PyResult>> { + let context = self.0.clone(); + get_aliases_blocking(py, context) + } + + pub fn alias_exists(&self, py: Python, name: String) -> PyResult { + let context = self.0.clone(); + alias_exists_blocking(py, context, name) + } + + pub fn delete_alias(&self, py: Python, name: String) -> PyResult<()> { + let context = self.0.clone(); + delete_alias_blocking(py, context, name) + } + + pub fn index( + &self, + py: Python, + builder: UpdateQueryBuilderWrapper, + collection: String, + data: Vec, + ) -> PyResult { + let context = self.0.clone(); + builder.execute_blocking(py, context, collection, data) + } + + pub fn select( + &self, + py: Python, + builder: &SelectQueryBuilderWrapper, + collection: String, + ) -> PyResult { + let context = self.0.clone(); + builder.execute_blocking(py, context, collection) + } + + pub fn delete( + &self, + py: Python, + builder: &DeleteQueryBuilderWrapper, + collection: String, + ) -> PyResult { + let context = self.0.clone(); + builder.execute_blocking(py, context, collection) + } +} diff --git a/wrappers/python/src/hosts.rs b/wrappers/python/src/hosts.rs new file mode 100644 index 0000000..333b646 --- /dev/null +++ b/wrappers/python/src/hosts.rs @@ -0,0 +1,112 @@ +use crate::models::context::SolrServerContextWrapper; +use crate::models::error::PyErrWrapper; +use async_trait::async_trait; +use pyo3::prelude::*; +use solrstice::hosts::solr_host::SolrHost; +use solrstice::hosts::solr_server_host::{SolrMultipleServerHost, SolrSingleServerHost}; +use solrstice::hosts::zookeeper_host::ZookeeperEnsembleHostConnector; +use solrstice::models::error::SolrError; +use std::borrow::Cow; +use std::sync::Arc; +use std::time::Duration; + +#[pymodule] +pub fn hosts(_py: Python<'_>, m: &PyModule) -> PyResult<()> { + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + Ok(()) +} + +#[pyclass(name = "SolrHost", subclass, module = "solrstice.hosts")] +#[derive(Clone)] +pub struct SolrHostWrapper { + pub solr_host: Arc, +} + +#[async_trait] +impl SolrHost for SolrHostWrapper { + async fn get_solr_node(&self) -> Result, SolrError> { + self.solr_host.get_solr_node().await + } +} + +#[pyclass(name = "SolrSingleServerHost", extends = SolrHostWrapper, module= "solrstice.hosts")] +#[derive(Clone)] +pub struct SolrSingleServerHostWrapper; + +#[pymethods] +impl SolrSingleServerHostWrapper { + #[new] + pub fn new(host: 
String) -> (Self, SolrHostWrapper) { + ( + SolrSingleServerHostWrapper {}, + SolrHostWrapper { + solr_host: Arc::new(SolrSingleServerHost::new(host.as_str())), + }, + ) + } +} + +#[pyclass(name = "SolrMultipleServerHost", extends = SolrHostWrapper, module= "solrstice.hosts")] +#[derive(Clone)] +pub struct SolrMultipleServerHostWrapper; + +#[pymethods] +impl SolrMultipleServerHostWrapper { + #[new] + pub fn new(hosts: Vec, timeout: f32) -> (Self, SolrHostWrapper) { + ( + SolrMultipleServerHostWrapper {}, + SolrHostWrapper { + solr_host: Arc::new(SolrMultipleServerHost::new( + hosts + .iter() + .map(|x| x.as_str()) + .collect::>() + .as_slice(), + Duration::from_secs_f32(timeout), + )), + }, + ) + } +} + +#[pyclass(name = "ZookeeperEnsembleHost", extends = SolrHostWrapper, module= "solrstice.hosts")] +#[derive(Clone)] +pub struct ZookeeperEnsembleHostWrapper; + +#[pyclass(name = "ZookeeperEnsembleHostConnector")] +#[derive(Clone)] +pub struct ZookeeperEnsembleHostConnectorWrapper(ZookeeperEnsembleHostConnector); + +#[pymethods] +impl ZookeeperEnsembleHostConnectorWrapper { + #[new] + pub fn new(hosts: Vec, timeout: f32) -> Self { + ZookeeperEnsembleHostConnectorWrapper(ZookeeperEnsembleHostConnector { + hosts, + timeout: Duration::from_secs_f32(timeout), + }) + } + + pub fn connect<'a>(&self, py: Python<'a>) -> PyResult<&'a PyAny> { + let connector = self.0.clone(); + pyo3_asyncio::tokio::future_into_py(py, async move { + let host = SolrHostWrapper { + solr_host: Arc::new(connector.connect().await.map_err(PyErrWrapper::from)?), + }; + Ok(Python::with_gil(|_| host)) + }) + } + + pub fn connect_blocking(&self) -> PyResult { + let host = SolrHostWrapper { + solr_host: Arc::new(self.0.connect_blocking().map_err(PyErrWrapper::from)?), + }; + Ok(Python::with_gil(|_| host)) + } +} diff --git a/wrappers/python/src/lib.rs b/wrappers/python/src/lib.rs new file mode 100644 index 0000000..afcdd01 --- /dev/null +++ b/wrappers/python/src/lib.rs @@ -0,0 +1,65 @@ +pub mod clients; +pub mod hosts; +pub mod models; +pub mod queries; + +use crate::clients::clients as clients_module; +use crate::hosts::hosts as hosts_module; +use crate::models::auth::auth as auth_module; +use crate::models::group::group as group_module; +use crate::models::response::response as response_module; +use crate::queries::alias::alias; +use crate::queries::collection::collection; +use crate::queries::config::config; +use crate::queries::index::{ + CommitTypeWrapper, DeleteQueryBuilderWrapper, UpdateQueryBuilderWrapper, +}; +use crate::queries::select::SelectQueryBuilderWrapper; +use pyo3::prelude::*; +use pyo3::types::PyDict; +use pyo3::wrap_pymodule; + +#[pymodule] +#[pyo3(name = "queries")] +fn queries_module(_py: Python, m: &PyModule) -> PyResult<()> { + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + Ok(()) +} + +/// A Python module implemented in Rust. 
+#[pymodule] +fn solrstice(_py: Python, m: &PyModule) -> PyResult<()> { + let sys = PyModule::import(_py, "sys")?; + let sys_modules: &PyDict = sys.getattr("modules")?.downcast()?; + + m.add_wrapped(wrap_pymodule!(config))?; + sys_modules.set_item("solrstice.config", m.getattr("config")?)?; + + m.add_wrapped(wrap_pymodule!(collection))?; + sys_modules.set_item("solrstice.collection", m.getattr("collection")?)?; + + m.add_wrapped(wrap_pymodule!(alias))?; + sys_modules.set_item("solrstice.alias", m.getattr("alias")?)?; + + m.add_wrapped(wrap_pymodule!(clients_module))?; + sys_modules.set_item("solrstice.clients", m.getattr("clients")?)?; + + m.add_wrapped(wrap_pymodule!(hosts_module))?; + sys_modules.set_item("solrstice.hosts", m.getattr("hosts")?)?; + + m.add_wrapped(wrap_pymodule!(auth_module))?; + sys_modules.set_item("solrstice.auth", m.getattr("auth")?)?; + + m.add_wrapped(wrap_pymodule!(queries_module))?; + sys_modules.set_item("solrstice.queries", m.getattr("queries")?)?; + + m.add_wrapped(wrap_pymodule!(response_module))?; + sys_modules.set_item("solrstice.response", m.getattr("response")?)?; + + m.add_wrapped(wrap_pymodule!(group_module))?; + sys_modules.set_item("solrstice.group", m.getattr("group")?)?; + Ok(()) +} diff --git a/wrappers/python/src/models/auth.rs b/wrappers/python/src/models/auth.rs new file mode 100644 index 0000000..016cd33 --- /dev/null +++ b/wrappers/python/src/models/auth.rs @@ -0,0 +1,39 @@ +use pyo3::prelude::*; +use solrstice::models::auth::{SolrAuth, SolrBasicAuth}; +use std::sync::Arc; + +#[pymodule] +pub fn auth(_py: Python<'_>, m: &PyModule) -> PyResult<()> { + m.add_class::()?; + m.add_class::()?; + Ok(()) +} + +#[pyclass(name = "SolrAuth", subclass, module = "solrstice.auth")] +#[derive(Clone)] +pub struct SolrAuthWrapper { + pub solr_auth: Arc, +} + +impl SolrAuth for SolrAuthWrapper { + fn add_auth_to_request(&self, request: reqwest::RequestBuilder) -> reqwest::RequestBuilder { + self.solr_auth.add_auth_to_request(request) + } +} + +#[pyclass(name = "SolrBasicAuth", extends=SolrAuthWrapper, module = "solrstice.auth")] +#[derive(Clone)] +pub struct SolrBasicAuthWrapper {} + +#[pymethods] +impl SolrBasicAuthWrapper { + #[new] + pub fn new(username: String, password: Option) -> (Self, SolrAuthWrapper) { + ( + SolrBasicAuthWrapper {}, + SolrAuthWrapper { + solr_auth: Arc::new(SolrBasicAuth { username, password }), + }, + ) + } +} diff --git a/wrappers/python/src/models/context.rs b/wrappers/python/src/models/context.rs new file mode 100644 index 0000000..e06b883 --- /dev/null +++ b/wrappers/python/src/models/context.rs @@ -0,0 +1,33 @@ +use crate::hosts::SolrHostWrapper; +use crate::models::auth::SolrAuthWrapper; +use pyo3::prelude::*; +use solrstice::models::context::{SolrServerContext, SolrServerContextBuilder}; + +#[pyclass(name = "SolrServerContext", subclass, module = "solrstice.hosts")] +#[derive(Clone)] +pub struct SolrServerContextWrapper(SolrServerContext); + +#[pymethods] +impl SolrServerContextWrapper { + #[new] + pub fn new(host: SolrHostWrapper, auth: Option) -> Self { + let mut builder = SolrServerContextBuilder::new(host); + builder = match auth { + Some(auth) => builder.with_auth(auth), + None => builder, + }; + SolrServerContextWrapper(builder.build()) + } +} + +impl From for SolrServerContext { + fn from(value: SolrServerContextWrapper) -> Self { + value.0 + } +} + +impl<'a> From<&'a SolrServerContextWrapper> for &'a SolrServerContext { + fn from(value: &'a SolrServerContextWrapper) -> Self { + &value.0 + } +} diff --git 
a/wrappers/python/src/models/error.rs b/wrappers/python/src/models/error.rs new file mode 100644 index 0000000..474c4fe --- /dev/null +++ b/wrappers/python/src/models/error.rs @@ -0,0 +1,31 @@ +use pyo3::exceptions::PyRuntimeError; +use pyo3::prelude::*; +use pyo3::PyDowncastError; +use pythonize::PythonizeError; +use solrstice::models::error::SolrError; + +pub struct PyErrWrapper(PyErr); + +impl From for PyErrWrapper { + fn from(err: SolrError) -> PyErrWrapper { + PyErrWrapper(PyRuntimeError::new_err(err.to_string())) + } +} + +impl From for PyErr { + fn from(err: PyErrWrapper) -> PyErr { + err.0 + } +} + +impl From for PyErrWrapper { + fn from(err: PythonizeError) -> Self { + PyErrWrapper(PyRuntimeError::new_err(err.to_string())) + } +} + +impl From> for PyErrWrapper { + fn from(err: PyDowncastError) -> Self { + PyErrWrapper(PyRuntimeError::new_err(err.to_string())) + } +} diff --git a/wrappers/python/src/models/group.rs b/wrappers/python/src/models/group.rs new file mode 100644 index 0000000..3bf53c2 --- /dev/null +++ b/wrappers/python/src/models/group.rs @@ -0,0 +1,91 @@ +use crate::models::error::PyErrWrapper; +use crate::models::response::SolrDocsResponseWrapper; +use crate::queries::components::grouping::{GroupFormattingWrapper, GroupingComponentWrapper}; +use pyo3::prelude::*; +use pythonize::pythonize; +use solrstice::models::group::{SolrGroupFieldResult, SolrGroupResult}; + +#[pymodule] +pub fn group(_py: Python, m: &PyModule) -> PyResult<()> { + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + Ok(()) +} + +#[derive(Clone)] +#[pyclass(name = "SolrGroupResult", module = "solrstice.group")] +pub struct SolrGroupResultWrapper(SolrGroupResult); + +#[derive(Clone)] +#[pyclass(name = "SolrGroupFieldResult", module = "solrstice.group")] +pub struct SolrGroupFieldResultWrapper { + #[pyo3(get)] + pub group_value: PyObject, + #[pyo3(get)] + pub doc_list: SolrDocsResponseWrapper, +} + +#[pymethods] +impl SolrGroupResultWrapper { + #[getter] + pub fn get_matches(&self) -> usize { + self.0.matches + } + + #[getter] + pub fn get_n_groups(&self) -> Option { + self.0.n_groups + } + + pub fn get_field_result(&self) -> PyResult>> { + let result = self.0.get_field_result(); + match result { + Some(v) => { + let result = v + .iter() + .map(|v| { + SolrGroupFieldResultWrapper::try_from(v.clone()).map_err(PyErrWrapper::from) + }) + .collect::, PyErrWrapper>>(); + Ok(Some(result?.to_vec())) + } + None => Ok(None), + } + } + + pub fn get_query_result(&self) -> PyResult> { + match self.0.get_query_result() { + Some(v) => Ok(Some(SolrDocsResponseWrapper::try_from(v.to_owned())?)), + None => Ok(None), + } + } + + pub fn get_simple_result(&self) -> PyResult> { + match self.0.get_simple_result() { + Some(v) => Ok(Some(SolrDocsResponseWrapper::try_from(v.to_owned())?)), + None => Ok(None), + } + } +} + +impl From for SolrGroupResultWrapper { + fn from(value: SolrGroupResult) -> Self { + SolrGroupResultWrapper(value) + } +} + +impl TryFrom for SolrGroupFieldResultWrapper { + type Error = PyErrWrapper; + + fn try_from(value: SolrGroupFieldResult) -> Result { + Python::with_gil(|py| -> Result { + let group_value = pythonize(py, &value.group_value).map_err(PyErrWrapper::from)?; + Ok(SolrGroupFieldResultWrapper { + group_value, + doc_list: SolrDocsResponseWrapper::try_from(value.doc_list)?, + }) + }) + } +} diff --git a/wrappers/python/src/models/mod.rs b/wrappers/python/src/models/mod.rs new file mode 100644 index 0000000..d92c7e8 --- /dev/null +++ 
b/wrappers/python/src/models/mod.rs @@ -0,0 +1,5 @@ +pub mod auth; +pub mod context; +pub mod error; +pub mod group; +pub mod response; diff --git a/wrappers/python/src/models/response.rs b/wrappers/python/src/models/response.rs new file mode 100644 index 0000000..69c0cdc --- /dev/null +++ b/wrappers/python/src/models/response.rs @@ -0,0 +1,91 @@ +use crate::models::error::PyErrWrapper; +use crate::models::group::{SolrGroupFieldResultWrapper, SolrGroupResultWrapper}; +use pyo3::prelude::*; +use pythonize::pythonize; +use solrstice::models::response::{SolrDocsResponse, SolrResponse}; +use std::collections::HashMap; + +#[pymodule] +pub fn response(_py: Python, m: &PyModule) -> PyResult<()> { + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + Ok(()) +} + +#[derive(Clone)] +#[pyclass(name = "SolrDocsResponse", module = "solrstice.response")] +pub struct SolrDocsResponseWrapper { + #[pyo3(get)] + pub num_found: usize, + #[pyo3(get)] + pub start: usize, + #[pyo3(get)] + pub num_found_exact: bool, + #[pyo3(get)] + pub docs: Vec, +} + +#[derive(Clone)] +#[pyclass(name = "SolrResponse", module = "solrstice.response")] +pub struct SolrResponseWrapper(SolrResponse); + +impl From for SolrResponseWrapper { + fn from(value: SolrResponse) -> Self { + SolrResponseWrapper(value) + } +} + +#[pymethods] +impl SolrResponseWrapper { + pub fn get_response(&self) -> PyResult> { + match self.0.get_response() { + Some(v) => Ok(Some( + SolrDocsResponseWrapper::try_from(v.to_owned()).map_err(PyErrWrapper::from)?, + )), + None => Ok(None), + } + } + + pub fn get_groups(&self) -> Option> { + let groups = self.0.get_groups(); + match groups { + None => None, + Some(groups) => { + let groups = groups + .iter() + .map(|(k, v)| (k.to_owned(), SolrGroupResultWrapper::from(v.to_owned()))) + .collect::>(); + Some(groups) + } + } + } + + #[getter] + pub fn get_next_cursor_mark(&self) -> Option<&str> { + self.0.next_cursor_mark.as_deref() + } +} + +impl TryFrom for SolrDocsResponseWrapper { + type Error = PyErrWrapper; + + fn try_from(value: SolrDocsResponse) -> Result { + Python::with_gil(|py| -> Result { + let docs = value + .get_docs::() + .map_err(PyErrWrapper::from)?; + let docs = docs + .iter() + .map(|x| pythonize(py, x).map_err(PyErrWrapper::from)) + .collect::, _>>()?; + Ok(SolrDocsResponseWrapper { + num_found: value.num_found, + start: value.start, + num_found_exact: value.num_found_exact, + docs, + }) + }) + } +} diff --git a/wrappers/python/src/queries/alias.rs b/wrappers/python/src/queries/alias.rs new file mode 100644 index 0000000..b5643c6 --- /dev/null +++ b/wrappers/python/src/queries/alias.rs @@ -0,0 +1,147 @@ +use crate::models::context::SolrServerContextWrapper; +use crate::models::error::PyErrWrapper; +use pyo3::prelude::*; +use solrstice::queries::alias::{ + alias_exists as alias_exists_rs, create_alias as create_alias_rs, + delete_alias as delete_alias_rs, get_aliases as get_aliases_rs, +}; +use solrstice::queries::alias::{ + alias_exists_blocking as alias_exists_blocking_rs, + create_alias_blocking as create_alias_blocking_rs, + delete_alias_blocking as delete_alias_blocking_rs, + get_aliases_blocking as get_aliases_blocking_rs, +}; +use std::collections::HashMap; + +#[pymodule] +pub fn alias(_py: Python<'_>, m: &PyModule) -> PyResult<()> { + m.add_function(wrap_pyfunction!(get_aliases, m)?)?; + m.add_function(wrap_pyfunction!(create_alias, m)?)?; + m.add_function(wrap_pyfunction!(alias_exists, m)?)?; + m.add_function(wrap_pyfunction!(delete_alias, m)?)?; + + 
m.add_function(wrap_pyfunction!(get_aliases_blocking, m)?)?; + m.add_function(wrap_pyfunction!(create_alias_blocking, m)?)?; + m.add_function(wrap_pyfunction!(alias_exists_blocking, m)?)?; + m.add_function(wrap_pyfunction!(delete_alias_blocking, m)?)?; + Ok(()) +} + +#[pyfunction] +pub fn get_aliases(py: Python, context: SolrServerContextWrapper) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = get_aliases_rs(&context.into()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result)) + }) +} + +#[pyfunction] +pub fn get_aliases_blocking( + py: Python, + context: SolrServerContextWrapper, +) -> PyResult>> { + py.allow_threads(move || { + let result = get_aliases_blocking_rs(&context.into()).map_err(PyErrWrapper::from)?; + Ok(result) + }) +} + +#[pyfunction] +pub fn create_alias( + py: Python, + context: SolrServerContextWrapper, + name: String, + collections: Vec, +) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = create_alias_rs( + &context.into(), + name.as_str(), + collections + .iter() + .map(|x| x.as_str()) + .collect::>() + .as_slice(), + ) + .await + .map_err(PyErrWrapper::from)?; + Ok(result) + }) +} + +#[pyfunction] +pub fn create_alias_blocking( + py: Python, + context: SolrServerContextWrapper, + name: String, + collections: Vec, +) -> PyResult<()> { + py.allow_threads(move || { + let result = create_alias_blocking_rs( + &context.into(), + name.as_str(), + collections + .iter() + .map(|x| x.as_str()) + .collect::>() + .as_slice(), + ) + .map_err(PyErrWrapper::from)?; + Ok(result) + }) +} + +#[pyfunction] +pub fn alias_exists( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = alias_exists_rs(&context.into(), name.as_str()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result)) + }) +} + +#[pyfunction] +pub fn alias_exists_blocking( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult { + py.allow_threads(move || { + let result = + alias_exists_blocking_rs(&context.into(), name.as_str()).map_err(PyErrWrapper::from)?; + Ok(result) + }) +} + +#[pyfunction] +pub fn delete_alias( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + delete_alias_rs(&context.into(), name.as_str()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| ())) + }) +} + +#[pyfunction] +pub fn delete_alias_blocking( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult<()> { + py.allow_threads(move || { + delete_alias_blocking_rs(&context.into(), name.as_str()).map_err(PyErrWrapper::from)?; + Ok(()) + }) +} diff --git a/wrappers/python/src/queries/collection.rs b/wrappers/python/src/queries/collection.rs new file mode 100644 index 0000000..5a9a6f1 --- /dev/null +++ b/wrappers/python/src/queries/collection.rs @@ -0,0 +1,147 @@ +use crate::models::context::SolrServerContextWrapper; +use crate::models::error::PyErrWrapper; +use pyo3::prelude::*; +use solrstice::queries::collection::{ + collection_exists as collection_exists_rs, create_collection as create_collection_rs, + delete_collection as delete_collection_rs, get_collections as get_collections_rs, +}; +use solrstice::queries::collection::{ + collection_exists_blocking as collection_exists_blocking_rs, + create_collection_blocking as 
create_collection_blocking_rs, + delete_collection_blocking as delete_collection_blocking_rs, + get_collections_blocking as get_collections_blocking_rs, +}; + +#[pymodule] +pub fn collection(_py: Python<'_>, m: &PyModule) -> PyResult<()> { + m.add_function(wrap_pyfunction!(create_collection, m)?)?; + m.add_function(wrap_pyfunction!(get_collections, m)?)?; + m.add_function(wrap_pyfunction!(collection_exists, m)?)?; + m.add_function(wrap_pyfunction!(delete_collection, m)?)?; + + m.add_function(wrap_pyfunction!(create_collection_blocking, m)?)?; + m.add_function(wrap_pyfunction!(get_collections_blocking, m)?)?; + m.add_function(wrap_pyfunction!(collection_exists_blocking, m)?)?; + m.add_function(wrap_pyfunction!(delete_collection_blocking, m)?)?; + Ok(()) +} + +#[pyfunction] +pub fn create_collection( + py: Python, + context: SolrServerContextWrapper, + name: String, + config: String, + shards: Option, + replication_factor: Option, +) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = create_collection_rs( + &context.into(), + name.as_str(), + config.as_str(), + shards.unwrap_or(1), + replication_factor.unwrap_or(1), + ) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result)) + }) +} + +#[pyfunction] +pub fn create_collection_blocking( + py: Python, + context: SolrServerContextWrapper, + name: String, + config: String, + shards: Option, + replication_factor: Option, +) -> PyResult<()> { + py.allow_threads(move || { + create_collection_blocking_rs( + &context.into(), + name.as_str(), + config.as_str(), + shards.unwrap_or(1), + replication_factor.unwrap_or(1), + ) + .map_err(PyErrWrapper::from)?; + Ok(()) + }) +} + +#[pyfunction] +pub fn get_collections(py: Python, context: SolrServerContextWrapper) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = get_collections_rs(&context.into()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result)) + }) +} + +#[pyfunction] +pub fn get_collections_blocking( + py: Python, + context: SolrServerContextWrapper, +) -> PyResult> { + py.allow_threads(move || { + let result = get_collections_blocking_rs(&context.into()).map_err(PyErrWrapper::from)?; + Ok(result) + }) +} + +#[pyfunction] +pub fn collection_exists( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = collection_exists_rs(&context.into(), name.as_str()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result)) + }) +} + +#[pyfunction] +pub fn collection_exists_blocking( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult { + py.allow_threads(move || { + let result = collection_exists_blocking_rs(&context.into(), name.as_str()) + .map_err(PyErrWrapper::from)?; + Ok(result) + }) +} + +#[pyfunction] +pub fn delete_collection( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + delete_collection_rs(&context.into(), name.as_str()) + .await + .map_err(PyErrWrapper::from)?; + Ok(()) + }) +} + +#[pyfunction] +pub fn delete_collection_blocking( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult<()> { + py.allow_threads(move || { + delete_collection_blocking_rs(&context.into(), name.as_str()) + .map_err(PyErrWrapper::from)?; + Ok(()) + }) +} diff --git a/wrappers/python/src/queries/components/grouping.rs 
b/wrappers/python/src/queries/components/grouping.rs new file mode 100644 index 0000000..c5120f2 --- /dev/null +++ b/wrappers/python/src/queries/components/grouping.rs @@ -0,0 +1,187 @@ +use pyo3::prelude::*; +use serde::{Deserialize, Serialize}; +use solrstice::queries::components::grouping::{GroupFormatting, GroupingComponentBuilder}; + +#[pyclass(name = "GroupingComponent", module = "solrstice.group")] +#[derive(Clone, Serialize, Deserialize)] +pub struct GroupingComponentWrapper(GroupingComponentBuilder); + +impl From for GroupingComponentBuilder { + fn from(wrapper: GroupingComponentWrapper) -> Self { + wrapper.0 + } +} + +impl<'a> From<&'a GroupingComponentWrapper> for &'a GroupingComponentBuilder { + fn from(wrapper: &'a GroupingComponentWrapper) -> Self { + &wrapper.0 + } +} + +#[derive(Clone, Copy, Serialize, Deserialize)] +#[pyclass(name = "GroupFormatting", module = "solrstice.group")] +pub enum GroupFormattingWrapper { + Simple, + Grouped, +} + +impl From for GroupFormatting { + fn from(wrapper: GroupFormattingWrapper) -> Self { + match wrapper { + GroupFormattingWrapper::Simple => GroupFormatting::Simple, + GroupFormattingWrapper::Grouped => GroupFormatting::Grouped, + } + } +} + +impl From for GroupFormattingWrapper { + fn from(format: GroupFormatting) -> Self { + match format { + GroupFormatting::Simple => GroupFormattingWrapper::Simple, + GroupFormatting::Grouped => GroupFormattingWrapper::Grouped, + } + } +} + +#[pymethods] +impl GroupingComponentWrapper { + #[new] + pub fn new( + fields: Option>, + queries: Option>, + limit: Option, + offset: Option, + sort: Option>, + format: Option, + main: Option, + n_groups: Option, + truncate: Option, + facet: Option, + ) -> Self { + let builder = GroupingComponentBuilder::new(); + let mut s = Self(builder); + s.set_fields(fields); + s.set_queries(queries); + s.set_limit(limit); + s.set_offset(offset); + s.set_sort(sort); + s.set_format(format); + s.set_main(main); + s.set_n_groups(n_groups); + s.set_truncate(truncate); + s.set_facet(facet); + s + } + + #[setter] + pub fn set_fields(&mut self, fields: Option>) { + self.0.field = fields.map_or(None, |x| { + Some(x.into_iter().map(|x| x.to_string()).collect()) + }); + } + + #[getter] + pub fn get_fields(&self) -> Option> { + self.0.field.clone() + } + + #[setter] + pub fn set_queries(&mut self, queries: Option>) { + self.0.queries = queries.map_or(None, |x| { + Some(x.into_iter().map(|x| x.to_string()).collect()) + }); + } + + #[getter] + pub fn get_queries(&self) -> Option> { + self.0.queries.clone() + } + + #[setter] + pub fn set_limit(&mut self, limit: Option) { + self.0.limit = limit; + } + + #[getter] + pub fn get_limit(&self) -> Option { + self.0.limit + } + + #[setter] + pub fn set_offset(&mut self, offset: Option) { + self.0.offset = offset; + } + + #[getter] + pub fn get_offset(&self) -> Option { + self.0.offset + } + + #[setter] + pub fn set_sort(&mut self, sort: Option>) { + self.0.sort = sort.map_or(None, |x| { + Some(x.into_iter().map(|x| x.to_string()).collect()) + }); + } + + #[getter] + pub fn get_sort(&self) -> Option> { + self.0.sort.clone() + } + + #[setter] + pub fn set_format(&mut self, format: Option) { + self.0.format = format.map_or(None, |x| Some(x.into())); + } + + #[getter] + pub fn get_format(&self) -> Option { + self.0.format.map_or(None, |x| Some(x.into())) + } + + #[setter] + pub fn set_main(&mut self, main: Option) { + self.0.main = main + } + + #[getter] + pub fn get_main(&self) -> Option { + self.0.main + } + + #[setter] + pub fn set_n_groups(&mut 
self, n_groups: Option) { + self.0.n_groups = n_groups + } + + #[getter] + pub fn get_n_groups(&self) -> Option { + self.0.n_groups + } + + #[setter] + pub fn set_truncate(&mut self, truncate: Option) { + self.0.truncate = truncate + } + + #[getter] + pub fn get_truncate(&self) -> Option { + self.0.truncate + } + + #[setter] + pub fn set_facet(&mut self, facet: Option) { + self.0.facet = facet + } + + #[getter] + pub fn get_facet(&self) -> Option { + self.0.facet + } +} + +impl From for GroupingComponentWrapper { + fn from(builder: GroupingComponentBuilder) -> Self { + Self(builder) + } +} diff --git a/wrappers/python/src/queries/components/mod.rs b/wrappers/python/src/queries/components/mod.rs new file mode 100644 index 0000000..9ccaae7 --- /dev/null +++ b/wrappers/python/src/queries/components/mod.rs @@ -0,0 +1 @@ +pub mod grouping; diff --git a/wrappers/python/src/queries/config.rs b/wrappers/python/src/queries/config.rs new file mode 100644 index 0000000..ce5860e --- /dev/null +++ b/wrappers/python/src/queries/config.rs @@ -0,0 +1,131 @@ +use crate::models::context::SolrServerContextWrapper; +use crate::models::error::PyErrWrapper; +use pyo3::prelude::*; +use solrstice::queries::config::{ + config_exists as config_exists_rs, delete_config as delete_config_rs, + get_configs as get_configs_rs, upload_config as upload_config_rs, +}; +use solrstice::queries::config::{ + config_exists_blocking as config_exists_blocking_rs, + delete_config_blocking as delete_config_blocking_rs, + get_configs_blocking as get_configs_blocking_rs, + upload_config_blocking as upload_config_blocking_rs, +}; +use std::path::PathBuf; + +#[pymodule] +pub fn config(_py: Python<'_>, m: &PyModule) -> PyResult<()> { + m.add_function(wrap_pyfunction!(upload_config, m)?)?; + m.add_function(wrap_pyfunction!(get_configs, m)?)?; + m.add_function(wrap_pyfunction!(config_exists, m)?)?; + m.add_function(wrap_pyfunction!(delete_config, m)?)?; + + m.add_function(wrap_pyfunction!(upload_config_blocking, m)?)?; + m.add_function(wrap_pyfunction!(get_configs_blocking, m)?)?; + m.add_function(wrap_pyfunction!(config_exists_blocking, m)?)?; + m.add_function(wrap_pyfunction!(delete_config_blocking, m)?)?; + Ok(()) +} + +#[pyfunction] +pub fn upload_config( + py: Python, + context: SolrServerContextWrapper, + name: String, + path: PathBuf, +) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = upload_config_rs(&context.into(), name.as_str(), path.as_path()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result)) + }) +} + +#[pyfunction] +pub fn upload_config_blocking( + py: Python, + context: SolrServerContextWrapper, + name: String, + path: PathBuf, +) -> PyResult<()> { + py.allow_threads(move || { + upload_config_blocking_rs(&context.into(), name.as_str(), path.as_path()) + .map_err(PyErrWrapper::from)?; + Ok(()) + }) +} + +#[pyfunction] +pub fn get_configs(py: Python, context: SolrServerContextWrapper) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = get_configs_rs(&context.into()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result)) + }) +} + +#[pyfunction] +pub fn get_configs_blocking( + py: Python, + context: SolrServerContextWrapper, +) -> PyResult> { + py.allow_threads(move || { + let result = get_configs_blocking_rs(&context.into()).map_err(PyErrWrapper::from)?; + Ok(result) + }) +} + +#[pyfunction] +pub fn config_exists( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> 
PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = config_exists_rs(&context.into(), name.as_str()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result)) + }) +} + +#[pyfunction] +pub fn config_exists_blocking( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult { + py.allow_threads(move || { + let result = config_exists_blocking_rs(&context.into(), name.as_str()) + .map_err(PyErrWrapper::from)?; + Ok(result) + }) +} + +#[pyfunction] +pub fn delete_config( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult<&PyAny> { + pyo3_asyncio::tokio::future_into_py(py, async move { + let result = delete_config_rs(&context.into(), name.as_str()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result)) + }) +} + +#[pyfunction] +pub fn delete_config_blocking( + py: Python, + context: SolrServerContextWrapper, + name: String, +) -> PyResult<()> { + py.allow_threads(move || { + delete_config_blocking_rs(&context.into(), name.as_str()).map_err(PyErrWrapper::from)?; + Ok(()) + }) +} diff --git a/wrappers/python/src/queries/index.rs b/wrappers/python/src/queries/index.rs new file mode 100644 index 0000000..a6a496a --- /dev/null +++ b/wrappers/python/src/queries/index.rs @@ -0,0 +1,276 @@ +use crate::models::context::SolrServerContextWrapper; +use crate::models::error::PyErrWrapper; +use crate::models::response::SolrResponseWrapper; +use pyo3::prelude::*; +use pyo3::types::PyBytes; +use pythonize::depythonize; +use serde::{Deserialize, Serialize}; +use solrstice::models::commit_type::CommitType; +use solrstice::models::error::SolrError; +use solrstice::queries::index::{DeleteQueryBuilder, UpdateQueryBuilder}; + +#[pyclass(name = "CommitType")] +#[derive(Clone, Copy, Serialize, Deserialize)] +pub enum CommitTypeWrapper { + Hard, + Soft, +} + +#[derive(Clone, Default, Serialize, Deserialize)] +#[pyclass(name = "UpdateQueryBuilder", module = "solrstice.queries")] +pub struct UpdateQueryBuilderWrapper(UpdateQueryBuilder); + +#[pymethods] +impl UpdateQueryBuilderWrapper { + #[new] + pub fn new(handler: Option, commit_type: Option) -> Self { + let mut s = Self(UpdateQueryBuilder::new()); + if let Some(handler) = handler { + s.set_handler(handler); + } + if let Some(commit_type) = commit_type { + s.set_commit_type(commit_type); + } + s + } + + #[getter] + pub fn get_handler(&self) -> &str { + self.0.handler.as_str() + } + + #[setter] + pub fn set_handler(&mut self, handler: String) { + self.0.handler = handler; + } + + #[getter] + pub fn get_commit_type(&self) -> CommitTypeWrapper { + self.0.commit_type.into() + } + + #[setter] + pub fn set_commit_type(&mut self, commit_type: CommitTypeWrapper) { + self.0.commit_type = commit_type.into(); + } + + pub fn execute<'a>( + &self, + py: Python<'a>, + context: SolrServerContextWrapper, + collection: String, + data: Vec, + ) -> PyResult<&'a PyAny> { + let builder = self.0.clone(); + let data: Result, PyErrWrapper> = data + .into_iter() + .map(|x| { + let as_any = x.downcast::(py).map_err(PyErrWrapper::from)?; + depythonize::(as_any).map_err(PyErrWrapper::from) + }) + .collect(); + let data = data?; + pyo3_asyncio::tokio::future_into_py::<_, SolrResponseWrapper>(py, async move { + let result = builder + .execute(&context.into(), collection.as_str(), data.as_slice()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result.into())) + }) + } + + pub fn execute_blocking( + &self, + py: Python, + context: 
SolrServerContextWrapper, + collection: String, + data: Vec, + ) -> PyResult { + let data: Result, PyErrWrapper> = data + .into_iter() + .map(|x| { + let as_any = x.downcast::(py).map_err(PyErrWrapper::from)?; + depythonize::(as_any).map_err(PyErrWrapper::from) + }) + .collect(); + let data = data?; + let builder = self.0.clone(); + py.allow_threads(move || { + let result = builder + .execute_blocking(&context.into(), collection.as_str(), data.as_slice()) + .map_err(PyErrWrapper::from)?; + Ok(result.into()) + }) + } + + pub fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> { + match state.extract::<&PyBytes>(py) { + Ok(s) => { + *self = serde_json::from_slice(s.as_bytes()) + .map_err(SolrError::from) + .map_err(PyErrWrapper::from)?; + Ok(()) + } + Err(e) => Err(e), + } + } + + pub fn __getstate__(&self, py: Python) -> PyResult { + Ok(PyBytes::new( + py, + serde_json::to_string(&self) + .map_err(SolrError::from) + .map_err(PyErrWrapper::from)? + .as_bytes(), + ) + .to_object(py)) + } +} + +impl From for CommitType { + fn from(value: CommitTypeWrapper) -> Self { + match value { + CommitTypeWrapper::Hard => CommitType::Hard, + CommitTypeWrapper::Soft => CommitType::Soft, + } + } +} + +impl From for CommitTypeWrapper { + fn from(value: CommitType) -> Self { + match value { + CommitType::Hard => CommitTypeWrapper::Hard, + CommitType::Soft => CommitTypeWrapper::Soft, + } + } +} + +#[derive(Clone, Default, Serialize, Deserialize)] +#[pyclass(name = "DeleteQueryBuilder", module = "solrstice.queries")] +pub struct DeleteQueryBuilderWrapper(DeleteQueryBuilder); + +#[pymethods] +impl DeleteQueryBuilderWrapper { + #[new] + pub fn new( + handler: Option, + commit_type: Option, + ids: Option>, + queries: Option>, + ) -> Self { + let mut s = Self(DeleteQueryBuilder::new()); + if let Some(handler) = handler { + s.set_handler(handler); + } + if let Some(commit_type) = commit_type { + s.set_commit_type(commit_type); + } + if let Some(ids) = ids { + s.set_ids(Some(ids)); + } + if let Some(queries) = queries { + s.set_queries(Some(queries)); + } + s + } + + #[getter] + pub fn get_handler(&self) -> &str { + self.0.handler.as_str() + } + + #[setter] + pub fn set_handler(&mut self, handler: String) { + self.0.handler = handler; + } + + #[getter] + pub fn get_commit_type(&self) -> CommitTypeWrapper { + self.0.commit_type.into() + } + + #[setter] + pub fn set_commit_type(&mut self, commit_type: CommitTypeWrapper) { + self.0.commit_type = commit_type.into(); + } + + #[getter] + pub fn get_ids(&self) -> Option> { + self.0 + .ids + .as_ref() + .map(|f| f.iter().map(|x| x.as_str()).collect()) + } + + #[setter] + pub fn set_ids(&mut self, ids: Option>) { + self.0.ids = ids.map(|f| f.into_iter().map(|x| x.to_string()).collect()) + } + + #[getter] + pub fn get_queries(&self) -> Option> { + self.0 + .queries + .as_ref() + .map(|f| f.iter().map(|x| x.as_str()).collect()) + } + + #[setter] + pub fn set_queries(&mut self, queries: Option>) { + self.0.queries = queries.map(|f| f.into_iter().map(|x| x.to_string()).collect()) + } + + pub fn execute<'a>( + &self, + py: Python<'a>, + context: SolrServerContextWrapper, + collection: String, + ) -> PyResult<&'a PyAny> { + let builder = self.0.clone(); + pyo3_asyncio::tokio::future_into_py::<_, SolrResponseWrapper>(py, async move { + let result = builder + .execute(&context.into(), collection.as_str()) + .await + .map_err(PyErrWrapper::from)?; + Ok(Python::with_gil(|_| result.into())) + }) + } + + pub fn execute_blocking( + &self, + py: Python, + context: 
SolrServerContextWrapper, + collection: String, + ) -> PyResult { + let builder = self.0.clone(); + py.allow_threads(move || { + let result = builder + .execute_blocking(&context.into(), collection.as_str()) + .map_err(PyErrWrapper::from)?; + Ok(result.into()) + }) + } + + pub fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> { + match state.extract::<&PyBytes>(py) { + Ok(s) => { + *self = serde_json::from_slice(s.as_bytes()) + .map_err(SolrError::from) + .map_err(PyErrWrapper::from)?; + Ok(()) + } + Err(e) => Err(e), + } + } + + pub fn __getstate__(&self, py: Python) -> PyResult { + Ok(PyBytes::new( + py, + serde_json::to_string(&self) + .map_err(SolrError::from) + .map_err(PyErrWrapper::from)? + .as_bytes(), + ) + .to_object(py)) + } +} diff --git a/wrappers/python/src/queries/mod.rs b/wrappers/python/src/queries/mod.rs new file mode 100644 index 0000000..5ae7f56 --- /dev/null +++ b/wrappers/python/src/queries/mod.rs @@ -0,0 +1,6 @@ +pub mod alias; +pub mod collection; +pub mod components; +pub mod config; +pub mod index; +pub mod select; diff --git a/wrappers/python/src/queries/select.rs b/wrappers/python/src/queries/select.rs new file mode 100644 index 0000000..976cb7c --- /dev/null +++ b/wrappers/python/src/queries/select.rs @@ -0,0 +1,181 @@ +use crate::models::context::SolrServerContextWrapper; +use crate::models::error::PyErrWrapper; +use crate::models::response::SolrResponseWrapper; +use crate::queries::components::grouping::GroupingComponentWrapper; +use pyo3::prelude::*; +use pyo3::types::PyBytes; +use serde::{Deserialize, Serialize}; +use solrstice::models::error::SolrError; +use solrstice::queries::select::SelectQueryBuilder; +#[pyclass(name = "SelectQueryBuilder", module = "solrstice.queries")] +#[derive(Clone, Serialize, Deserialize)] +pub struct SelectQueryBuilderWrapper(SelectQueryBuilder); + +#[pymethods] +impl SelectQueryBuilderWrapper { + #[new] + fn new( + q: Option, + fl: Option>, + fq: Option>, + rows: Option, + start: Option, + sort: Option>, + cursor_mark: Option, + grouping: Option, + ) -> Self { + let builder = SelectQueryBuilder::new(); + let mut s = Self(builder); + if let Some(q) = q { + s.set_q(q); + } + s.set_fl(fl); + s.set_fq(fq); + if let Some(rows) = rows { + s.set_rows(rows); + } + if let Some(start) = start { + s.set_start(start); + } + s.set_sort(sort); + s.set_cursor_mark(cursor_mark); + s.set_grouping(grouping); + s + } + + #[getter] + fn get_q(&self) -> &str { + &self.0.q + } + + #[setter] + fn set_q(&mut self, q: String) { + self.0.q = q + } + + #[getter] + fn get_fl(&self) -> Option> { + self.0.fl.clone() + } + + #[setter] + fn set_fl(&mut self, fl: Option>) { + self.0.fl = fl.map(|f| f.into_iter().map(|x| x.to_string()).collect()) + } + + #[getter] + fn get_fq(&self) -> Option> { + self.0.fq.clone() + } + + #[setter] + fn set_fq(&mut self, fq: Option>) { + self.0.fq = fq.map(|f| f.into_iter().map(|x| x.to_string()).collect()) + } + + #[getter] + fn get_rows(&self) -> usize { + self.0.rows + } + + #[setter] + fn set_rows(&mut self, rows: usize) { + self.0.rows = rows + } + + #[getter] + fn get_start(&self) -> usize { + self.0.start + } + + #[setter] + fn set_start(&mut self, start: usize) { + self.0.start = start + } + + #[getter] + fn get_sort(&self) -> Option> { + self.0.sort.clone() + } + + #[setter] + fn set_sort(&mut self, sort: Option>) { + self.0.sort = sort.map(|f| f.into_iter().map(|x| x.to_string()).collect()) + } + + #[getter] + fn get_cursor_mark(&self) -> Option { + self.0.cursor_mark.clone() + } + + #[setter] + 
fn set_cursor_mark(&mut self, cursor_mark: Option) { + self.0.cursor_mark = cursor_mark + } + + #[getter] + fn get_grouping(&self) -> Option { + self.0.grouping.clone().map(|g| g.into()) + } + + #[setter] + fn set_grouping(&mut self, grouping: Option) { + self.0.grouping = grouping.map(|g| g.into()) + } + + pub fn execute<'a>( + &self, + py: Python<'a>, + context: SolrServerContextWrapper, + collection: String, + ) -> PyResult<&'a PyAny> { + let builder = self.0.clone(); + pyo3_asyncio::tokio::future_into_py(py, async move { + let result: SolrResponseWrapper = builder + .execute(&context.into(), &collection) + .await + .map_err(PyErrWrapper::from)? + .into(); + Ok(Python::with_gil(|_| result)) + }) + } + + pub fn execute_blocking( + &self, + py: Python, + contect: SolrServerContextWrapper, + collection: String, + ) -> PyResult { + let builder = self.0.clone(); + py.allow_threads(move || { + let result: SolrResponseWrapper = builder + .execute_blocking(&contect.into(), &collection) + .map_err(PyErrWrapper::from)? + .into(); + Ok(result) + }) + } + + pub fn __setstate__(&mut self, py: Python, state: PyObject) -> PyResult<()> { + match state.extract::<&PyBytes>(py) { + Ok(s) => { + *self = serde_json::from_slice(s.as_bytes()) + .map_err(SolrError::from) + .map_err(PyErrWrapper::from)?; + Ok(()) + } + Err(e) => Err(e), + } + } + + pub fn __getstate__(&self, py: Python) -> PyResult { + Ok(PyBytes::new( + py, + serde_json::to_string(&self) + .map_err(SolrError::from) + .map_err(PyErrWrapper::from)? + .as_bytes(), + ) + .to_object(py)) + } +} diff --git a/wrappers/python/tests/helpers.py b/wrappers/python/tests/helpers.py new file mode 100644 index 0000000..3bda9ad --- /dev/null +++ b/wrappers/python/tests/helpers.py @@ -0,0 +1,113 @@ +import os +import time +from dataclasses import dataclass +from typing import List, Optional +from urllib.error import HTTPError +from urllib.request import urlopen + +from dataclasses_json import dataclass_json, DataClassJsonMixin +from dotenv import load_dotenv + +from solrstice.auth import SolrBasicAuth +from solrstice.collection import delete_collection, create_collection +from solrstice.config import delete_config, upload_config +from solrstice.hosts import SolrServerContext, SolrSingleServerHost +from solrstice.queries import UpdateQueryBuilder + + +@dataclass +class Config: + solr_host: str + speedbump_host: Optional[str] + solr_username: Optional[str] + solr_password: Optional[str] + context: SolrServerContext + config_path: str + + +def create_config() -> Config: + path = '../../test_setup/.env' + load_dotenv(path) + solr_auth = None + solr_username = os.getenv('SOLR_USERNAME') + solr_password = os.getenv('SOLR_PASSWORD') + if solr_password is not None and solr_username is not '': + solr_auth = SolrBasicAuth( + solr_username, solr_password + ) + host = os.getenv('SOLR_HOST') + speedbump_host = os.getenv('SPEEDBUMP_HOST') + solr_host = SolrSingleServerHost(host) + wait_for_solr(host, 30) + return Config(host, speedbump_host, solr_username, solr_password, SolrServerContext(solr_host, solr_auth), "../../test_setup/test_collection") + + +def wait_for_solr(host: str, max_time: int): + end = time.time() + max_time + while time.time() < end: + try: + with urlopen(f'{host}{"/solr/admin/collections"}?action=CLUSTERSTATUS') as response: + if response.status == 200: + return + except HTTPError as e: + if e.code == 401: + return + except Exception as _: + pass + time.sleep(1) + raise RuntimeError(f"Solr did not respond within {max_time} seconds") + + +@dataclass_json 
+@dataclass +class Population(DataClassJsonMixin): + id: str + age: int + count: int + + +@dataclass_json +@dataclass +class City(DataClassJsonMixin): + id: str + city_name: str + population: List[Population] + + +def load_test_data() -> List[City]: + with open('../../test_setup/test_data.json') as f: + return City.schema().loads(f.read(), many=True) + + +async def index_test_data(context: SolrServerContext, name: str) -> None: + data = load_test_data() + update_builder = UpdateQueryBuilder() + await update_builder.execute(context, name, [City.to_dict(x) for x in data]) + + +async def setup_collection(context: SolrServerContext, name: str, config_path: str) -> None: + try: + await delete_collection(context, name) + except RuntimeError: + pass + try: + await delete_config(context, name) + except RuntimeError: + pass + await upload_config( + context, + name, + config_path, + ) + await create_collection(context, name, name, 1, 1) + + +async def teardown_collection(context: SolrServerContext, name: str) -> None: + try: + await delete_collection(context, name) + except RuntimeError: + pass + try: + await delete_config(context, name) + except RuntimeError: + pass diff --git a/wrappers/python/tests/test_alias.py b/wrappers/python/tests/test_alias.py new file mode 100644 index 0000000..8dd8c21 --- /dev/null +++ b/wrappers/python/tests/test_alias.py @@ -0,0 +1,67 @@ +import pytest + +from helpers import Config, create_config +from solrstice.alias import delete_alias, alias_exists, create_alias, create_alias_blocking, alias_exists_blocking, \ + delete_alias_blocking +from solrstice.collection import delete_collection, collection_exists, create_collection, delete_collection_blocking, \ + collection_exists_blocking, create_collection_blocking +from solrstice.config import delete_config, config_exists, upload_config, delete_config_blocking, \ + upload_config_blocking, config_exists_blocking + + +@pytest.fixture() +def config() -> Config: + yield create_config() + + +@pytest.mark.asyncio +async def test_alias_all_async_functions_exported(config: Config): + name = "AliasConfig" + + functions = [delete_alias_blocking, delete_collection_blocking, delete_config_blocking] + + for function in functions: + try: + function(config.context, name) + except RuntimeError: + pass + assert not await alias_exists(config.context, name) + assert not await collection_exists(config.context, name) + assert not await config_exists(config.context, name) + await upload_config( + config.context, + name, + config.config_path, + ) + await create_collection(config.context, name, name, 1, 1) + await create_alias(config.context, name, [name]) + assert await alias_exists(config.context, name) + await delete_alias(config.context, name) + await delete_collection(config.context, name) + await delete_config(config.context, name) + + +def test_alias_all_blocking_functions_exported(config: Config): + name = "AliasBlockingConfig" + + + functions = [delete_alias_blocking, delete_collection_blocking, delete_config_blocking] + + for function in functions: + try: + function(config.context, name) + except RuntimeError: + pass + assert not alias_exists_blocking(config.context, name) + assert not collection_exists_blocking(config.context, name) + assert not config_exists_blocking(config.context, name) + upload_config_blocking( + config.context, + name, + config.config_path, + ) + create_collection_blocking(config.context, name, name, 1, 1) + create_alias_blocking(config.context, name, [name]) + assert alias_exists_blocking(config.context, name) + 
delete_collection_blocking(config.context, name) + delete_config_blocking(config.context, name) diff --git a/wrappers/python/tests/test_clients.py b/wrappers/python/tests/test_clients.py new file mode 100644 index 0000000..a694070 --- /dev/null +++ b/wrappers/python/tests/test_clients.py @@ -0,0 +1,71 @@ +import asyncio + +import pytest + +from helpers import Config, create_config +from solrstice.clients import AsyncSolrCloudClient, BlockingSolrCloudClient +from solrstice.queries import UpdateQueryBuilder, SelectQueryBuilder, DeleteQueryBuilder + + +@pytest.fixture() +def config() -> Config: + yield create_config() + + +@pytest.mark.asyncio +async def test_async_client_works(config: Config): + name = "AsyncClientWorks" + + client = AsyncSolrCloudClient(config.context) + try: + await client.delete_collection(name) + except: + pass + try: + await client.delete_config(name) + except: + pass + + await client.upload_config(name, config.config_path) + await client.create_collection(name, name, shards=1, replication_factor=1) + await client.index(UpdateQueryBuilder(), name, [{'id': 'example_document'}]) + response = await client.select(SelectQueryBuilder(fq=['id:example_document']), name) + docs = response.get_response() + assert docs.num_found == 1 + + await client.delete(DeleteQueryBuilder(ids=['example_document']), name) + response = await client.select(SelectQueryBuilder(fq=['id:example_document']), name) + docs = response.get_response() + assert docs.num_found == 0 + + await client.delete_collection(name) + await client.delete_config(name) + + +def test_blocking_client_works(config: Config): + name = "BlockingClientWorks" + + client = BlockingSolrCloudClient(config.context) + try: + client.delete_collection(name) + except: + pass + try: + client.delete_config(name) + except: + pass + + client.upload_config(name, config.config_path) + client.create_collection(name, name, shards=1, replication_factor=1) + client.index(UpdateQueryBuilder(), name, [{'id': 'example_document'}]) + response = client.select(SelectQueryBuilder(fq=['id:example_document']), name) + docs = response.get_response() + assert docs.num_found == 1 + + client.delete(DeleteQueryBuilder(ids=['example_document']), name) + response = client.select(SelectQueryBuilder(fq=['id:example_document']), name) + docs = response.get_response() + assert docs.num_found == 0 + + client.delete_collection(name) + client.delete_config(name) diff --git a/wrappers/python/tests/test_collection.py b/wrappers/python/tests/test_collection.py new file mode 100644 index 0000000..eb98df7 --- /dev/null +++ b/wrappers/python/tests/test_collection.py @@ -0,0 +1,61 @@ +import pytest + +from helpers import Config, create_config +from solrstice.collection import delete_collection, collection_exists, create_collection, delete_collection_blocking, \ + collection_exists_blocking, create_collection_blocking +from solrstice.config import delete_config, config_exists, upload_config, delete_config_blocking, \ + upload_config_blocking, config_exists_blocking + + +@pytest.fixture() +def config() -> Config: + yield create_config() + + +@pytest.mark.asyncio +async def test_collection_all_async_functions_exported(config: Config): + name = "CollectionConfig" + + try: + await delete_collection(config.context, name) + except RuntimeError: + pass + try: + await delete_config(config.context, name) + except RuntimeError: + pass + assert not await collection_exists(config.context, name) + assert not await config_exists(config.context, name) + await upload_config( + config.context, 
+ name, + config.config_path, + ) + await create_collection(config.context, name, name, 1, 1) + assert await collection_exists(config.context, name) + await delete_collection(config.context, name) + await delete_config(config.context, name) + + +def test_collection_all_blocking_functions_exported(config: Config): + name = "CollectionBlockingConfig" + + try: + delete_collection_blocking(config.context, name) + except RuntimeError: + pass + try: + delete_config_blocking(config.context, name) + except RuntimeError: + pass + assert not collection_exists_blocking(config.context, name) + assert not config_exists_blocking(config.context, name) + upload_config_blocking( + config.context, + name, + config.config_path, + ) + create_collection_blocking(config.context, name, name, 1, 1) + assert collection_exists_blocking(config.context, name) + delete_collection_blocking(config.context, name) + delete_config_blocking(config.context, name) diff --git a/wrappers/python/tests/test_config.py b/wrappers/python/tests/test_config.py new file mode 100644 index 0000000..2535bc2 --- /dev/null +++ b/wrappers/python/tests/test_config.py @@ -0,0 +1,41 @@ +import pytest + +from helpers import create_config, Config +from solrstice.config import upload_config, upload_config_blocking, delete_config, delete_config_blocking, \ + config_exists, config_exists_blocking + + +@pytest.fixture() +def config() -> Config: + yield create_config() + + +@pytest.mark.asyncio +async def test_config_all_async_functions_exported(config: Config): + try: + await delete_config(config.context, "UploadConfig") + except RuntimeError: + pass + assert not await config_exists(config.context, "UploadConfig") + await upload_config( + config.context, + "UploadConfig", + config.config_path, + ) + assert await config_exists(config.context, "UploadConfig") + await delete_config(config.context, "UploadConfig") + + +def test_config_all_blocking_functions_exported(config: Config): + try: + delete_config_blocking(config.context, "UploadConfig") + except RuntimeError: + pass + assert not config_exists_blocking(config.context, "UploadConfig") + upload_config_blocking( + config.context, + "UploadConfig", + config.config_path, + ) + assert config_exists_blocking(config.context, "UploadConfig") + delete_config_blocking(config.context, "UploadConfig") diff --git a/wrappers/python/tests/test_group.py b/wrappers/python/tests/test_group.py new file mode 100644 index 0000000..d005b40 --- /dev/null +++ b/wrappers/python/tests/test_group.py @@ -0,0 +1,31 @@ +import pytest + +from helpers import Config, create_config, wait_for_solr, setup_collection, teardown_collection, index_test_data +from solrstice.group import GroupingComponent +from solrstice.queries import SelectQueryBuilder + + +@pytest.fixture() +def config() -> Config: + yield create_config() + + +@pytest.mark.asyncio +async def test_get_field_result_works(config: Config): + name = "GroupFieldQuery" + wait_for_solr(config.solr_host, 30) + + try: + await setup_collection(config.context, name, config.config_path) + await index_test_data(config.context, name) + + group_builder = GroupingComponent(fields=['age'], limit=10) + select_builder = SelectQueryBuilder(fq=["age:[* TO *]"], grouping=group_builder) + groups = (await select_builder.execute(config.context, name)).get_groups() + age_group = groups["age"] + group = age_group.get_field_result() + assert age_group.n_groups is None + assert age_group.matches > 0 + assert len(group) > 0 + finally: + await teardown_collection(config.context, name) diff --git 
a/wrappers/python/tests/test_hosts.py b/wrappers/python/tests/test_hosts.py new file mode 100644 index 0000000..e6107f7 --- /dev/null +++ b/wrappers/python/tests/test_hosts.py @@ -0,0 +1,70 @@ +import os +from dataclasses import dataclass +from pathlib import Path +from typing import Optional + +from dotenv import load_dotenv +import pytest + +from helpers import wait_for_solr +from solrstice.auth import SolrBasicAuth +from solrstice.config import get_configs, get_configs_blocking +from solrstice.hosts import SolrServerContext, ZookeeperEnsembleHostConnector, SolrSingleServerHost, \ + SolrMultipleServerHost + + +@dataclass +class Config: + host: str + zookeeper_host: str + auth: Optional[SolrBasicAuth] + + +@pytest.fixture() +def config() -> Config: + path = Path('../../test_setup/.env').resolve() + load_dotenv(path) + solr_auth = None + if os.getenv('SOLR_USERNAME') is not None and os.getenv('SOLR_PASSWORD') is not '': + solr_auth = SolrBasicAuth( + os.getenv('SOLR_USERNAME'), + os.getenv('SOLR_PASSWORD'), + ) + host = os.getenv('SOLR_HOST') + + yield Config(host, os.getenv('ZK_HOST'), solr_auth) + + +@pytest.mark.asyncio +async def test_zookeeper_connection_works(config: Config): + wait_for_solr(config.host, 30) + context = SolrServerContext(await ZookeeperEnsembleHostConnector([config.zookeeper_host], 30).connect(), + config.auth) + await get_configs(context) + + +def test_zookeeper_connection_works_blocking(config: Config): + wait_for_solr(config.host, 30) + context = SolrServerContext(ZookeeperEnsembleHostConnector([config.zookeeper_host], 30).connect_blocking(), + config.auth) + get_configs_blocking(context) + + +@pytest.mark.asyncio +async def test_solr_single_server_works(config: Config): + wait_for_solr(config.host, 30) + context = SolrServerContext(SolrSingleServerHost(config.host), config.auth) + await get_configs(context) + + +@pytest.mark.asyncio +async def test_multiple_server_works(config: Config): + wait_for_solr(config.host, 30) + context = SolrServerContext(SolrMultipleServerHost([config.host], 5), config.auth) + await get_configs(context) + + +def test_solr_multiple_server_works_blocking(config: Config): + wait_for_solr(config.host, 30) + context = SolrServerContext(SolrMultipleServerHost([config.host], 5), config.auth) + get_configs_blocking(context) diff --git a/wrappers/python/tests/test_index.py b/wrappers/python/tests/test_index.py new file mode 100644 index 0000000..3d1b256 --- /dev/null +++ b/wrappers/python/tests/test_index.py @@ -0,0 +1,22 @@ +import pytest + +from helpers import Config, wait_for_solr, setup_collection, teardown_collection, create_config +from solrstice.queries import UpdateQueryBuilder, CommitType + + +@pytest.fixture() +def config() -> Config: + yield create_config() + + +@pytest.mark.asyncio +async def test_index_indexes_documents(config: Config): + name = "IndexIndexesDocuments" + wait_for_solr(config.solr_host, 30) + + try: + await setup_collection(config.context, name, config.config_path) + + await UpdateQueryBuilder(handler='update', commit_type=CommitType.Soft).execute(config.context, name, [{'id': 'test'}]) + finally: + await teardown_collection(config.context, name) diff --git a/wrappers/python/tests/test_latency.py b/wrappers/python/tests/test_latency.py new file mode 100644 index 0000000..8d05b66 --- /dev/null +++ b/wrappers/python/tests/test_latency.py @@ -0,0 +1,60 @@ +# Test that the GIL is not blocked while waiting for a response. 
+import asyncio +import time +from multiprocessing.pool import ThreadPool +from typing import Optional + +import pytest + +from helpers import Config, create_config +from solrstice.auth import SolrBasicAuth +from solrstice.clients import BlockingSolrCloudClient, AsyncSolrCloudClient +from solrstice.hosts import SolrSingleServerHost, SolrServerContext + + +@pytest.fixture() +def config() -> Config: + yield create_config() + + +def test_blocking_client_does_not_block_gil_config(config: Config): + processes = 8 + if not config.speedbump_host: + pytest.skip("No speedbump host configured") + with ThreadPool(processes) as pool: + start_time = time.time() + tasks = [ + pool.apply_async(get_configs_blocking, (config.speedbump_host, config.solr_username, config.solr_password)) + for _ in range(processes)] + for task in tasks: + task.get(timeout=processes * 3) + + elapsed_seconds = time.time() - start_time + # Presume since the latency is 2s, that it should have completed in max 1s, given no GIL blocking. + assert elapsed_seconds < processes + + +@pytest.mark.asyncio +async def test_async_client_does_not_block_event_loop(config: Config): + processes = 8 + if not config.speedbump_host: + pytest.skip("No speedbump host configured") + start_time = time.time() + tasks = [asyncio.create_task(get_configs_async(config.speedbump_host, config.solr_username, config.solr_password)) + for _ in range(processes)] + await asyncio.gather(*tasks) + elapsed_seconds = time.time() - start_time + # Presume since the latency is 2s, that it should have completed in max 1s, given no GIL blocking. + assert elapsed_seconds < processes + + +def get_configs_blocking(host: str, username: Optional[str], password: Optional[str]): + auth = None if not username else SolrBasicAuth(username, password) + client = BlockingSolrCloudClient(SolrServerContext(SolrSingleServerHost(host), auth)) + return client.get_configs() + + +async def get_configs_async(host: str, username: Optional[str], password: Optional[str]): + auth = None if not username else SolrBasicAuth(username, password) + client = AsyncSolrCloudClient(SolrServerContext(SolrSingleServerHost(host), auth)) + return await client.get_configs() diff --git a/wrappers/python/tests/test_multiprocessing.py b/wrappers/python/tests/test_multiprocessing.py new file mode 100644 index 0000000..dd5d977 --- /dev/null +++ b/wrappers/python/tests/test_multiprocessing.py @@ -0,0 +1,97 @@ +import dataclasses +import random +from multiprocessing import Pool +from multiprocessing.pool import ThreadPool +from typing import Optional + +import pytest + +from helpers import Config, create_config +from solrstice.auth import SolrBasicAuth +from solrstice.clients import BlockingSolrCloudClient +from solrstice.hosts import SolrServerContext, SolrSingleServerHost +from solrstice.queries import UpdateQueryBuilder + + +@pytest.fixture() +def config() -> Config: + yield create_config() + + +@dataclasses.dataclass +class PickableConfig: + solr_host: str + solr_username: Optional[str] + solr_password: Optional[str] + + +def create_client(config: PickableConfig): + auth = None if not config.solr_username else SolrBasicAuth(config.solr_username, config.solr_password) + return BlockingSolrCloudClient(SolrServerContext(SolrSingleServerHost(config.solr_host), auth)) + + +def index_independent(config: PickableConfig, collection_name): + client = create_client(config) + client.index(UpdateQueryBuilder(), collection_name, [{'id': str(random.randint(0, 10000000000))}]) + +# TODO This does not work, and blocks forever +# 
+# def test_blocking_multiprocessing_works(config: Config):
+#     name = "BlockingMultiprocessingWorks"
+#
+#     pickable_config = PickableConfig(config.solr_host, config.solr_username, config.solr_password)
+#     client = create_client(pickable_config)
+#     try:
+#         client.delete_collection(name)
+#     except:
+#         pass
+#     try:
+#         client.delete_config(name)
+#     except:
+#         pass
+#
+#     client.upload_config(name, config.config_path)
+#     client.create_collection(name, name, shards=1, replication_factor=1)
+#
+#     with Pool(processes=4) as pool:
+#         tasks = [pool.apply_async(index_independent, (pickable_config, name)) for _ in range(10)]
+#         [task.get(15) for task in tasks]
+#
+#     try:
+#         client.delete_collection(name)
+#     except:
+#         pass
+#     try:
+#         client.delete_config(name)
+#     except:
+#         pass
+
+
+def test_blocking_multithreading_works(config: Config):
+    name = "BlockingMultithreadingWorks"
+
+    pickable_config = PickableConfig(config.solr_host, config.solr_username, config.solr_password)
+    client = create_client(pickable_config)
+    try:
+        client.delete_collection(name)
+    except:
+        pass
+    try:
+        client.delete_config(name)
+    except:
+        pass
+
+    client.upload_config(name, config.config_path)
+    client.create_collection(name, name, shards=1, replication_factor=1)
+
+    with ThreadPool(processes=4) as pool:
+        tasks = [pool.apply_async(index_independent, (pickable_config, name)) for _ in range(10)]
+        [task.get(15) for task in tasks]
+
+    try:
+        client.delete_collection(name)
+    except:
+        pass
+    try:
+        client.delete_config(name)
+    except:
+        pass
\ No newline at end of file
diff --git a/wrappers/python/tests/test_pickle.py b/wrappers/python/tests/test_pickle.py
new file mode 100644
index 0000000..171704c
--- /dev/null
+++ b/wrappers/python/tests/test_pickle.py
@@ -0,0 +1,29 @@
+import pickle
+
+from solrstice.group import GroupingComponent
+from solrstice.queries import SelectQueryBuilder, UpdateQueryBuilder, CommitType, DeleteQueryBuilder
+
+
+def test_pickle_works_select_query_builder():
+    builder = SelectQueryBuilder(fq=['test', 'test'], grouping=GroupingComponent(fields=['test'], main=True, facet=False))
+    string = pickle.dumps(builder)
+    builder_copy: SelectQueryBuilder = pickle.loads(string)
+    assert builder_copy.fq == builder.fq
+    assert builder_copy.grouping.fields == builder.grouping.fields
+    assert builder_copy.grouping.main == builder.grouping.main
+    assert builder_copy.grouping.facet == builder.grouping.facet
+
+
+def test_pickle_works_update_query_builder():
+    builder = UpdateQueryBuilder(handler='test', commit_type=CommitType.Soft)
+    string = pickle.dumps(builder)
+    builder_copy: UpdateQueryBuilder = pickle.loads(string)
+    assert builder_copy.handler == builder.handler
+    assert builder_copy.commit_type == builder.commit_type
+
+
+def test_pickle_works_delete_query_builder():
+    builder = DeleteQueryBuilder()
+    string = pickle.dumps(builder)
+    builder_copy: DeleteQueryBuilder = pickle.loads(string)
+    assert builder_copy.handler == builder.handler
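+
+# Hedged note, not part of the original tests: being picklable is also what lets these
+# builders be passed as arguments to multiprocessing workers, since worker arguments are
+# serialized with pickle. A rough illustration, where `run_select` is a hypothetical
+# worker function that would execute the query in the child process:
+#
+#     with Pool(processes=2) as pool:
+#         pool.apply_async(run_select, (SelectQueryBuilder(fq=['id:123']),))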
"SelectGetResponse" + wait_for_solr(config.solr_host, 30) + + try: + await setup_collection(config.context, name, config.config_path) + + await index_test_data(config.context, name) + + pass + builder = SelectQueryBuilder() + solr_response = await builder.execute(config.context, name) + docs_response = solr_response.get_response() + assert docs_response.num_found > 0 + assert docs_response.start == 0 + assert len(docs_response.docs) > 4 + finally: + await teardown_collection(config.context, name) + + +@pytest.mark.asyncio +async def test_select_works_when_no_result(config: Config): + name = "SelectNoResult" + wait_for_solr(config.solr_host, 30) + + try: + await setup_collection(config.context, name, config.config_path) + + await index_test_data(config.context, name) + + builder = SelectQueryBuilder(fq=['id:non_existent_id']) + solr_response = await builder.execute(config.context, name) + docs_response = solr_response.get_response() + assert docs_response.num_found == 0 + assert docs_response.start == 0 + assert len(docs_response.docs) == 0 + finally: + await teardown_collection(config.context, name) + +@pytest.mark.asyncio +async def test_select_works_with_cursor_mark(config: Config): + name = "SelectCursorMark" + wait_for_solr(config.solr_host, 30) + + try: + await setup_collection(config.context, name, config.config_path) + await index_test_data(config.context, name) + cursor_mark = "*" + current_iteration = 0 + while True: + if current_iteration > 100: + raise Exception("Cursor mark test failed. Too many iterations") + builder = SelectQueryBuilder(fq=['age:[* TO *]'], rows=1, sort=['id desc'], cursor_mark=cursor_mark) + result = await builder.execute(config.context, name) + if result.next_cursor_mark is not None: + if cursor_mark == "*": + break + cursor_mark = result.next_cursor_mark + else: + raise Exception("Cursor mark test failed. No next cursor mark") + current_iteration += 1 + + finally: + await teardown_collection(config.context, name) \ No newline at end of file