From c16c4aadc555c490060d627bebc158a3ba1cd179 Mon Sep 17 00:00:00 2001
From: Abdulla Abdurakhmanov
Date: Fri, 12 Jul 2024 13:34:09 +0200
Subject: [PATCH] Hyper v1 and Google Cloud SDK 0.25 support (#185)

* Hyper v1 and Google Cloud SDK 0.25 support

* Fix clippy warnings
---
 .github/workflows/tests.yml         |  8 +--
 Cargo.toml                          | 10 ++--
 README.md                           | 93 +++++++++++++++--------------
 src/firestore_serde/deserializer.rs |  4 +-
 tests/common/mod.rs                 |  3 +-
 5 files changed, 63 insertions(+), 55 deletions(-)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 41a5e8e..87d3624 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -1,8 +1,8 @@
 name: tests & formatting
-on: 
+on:
   push:
   pull_request:
-    types: [opened]
+    types: [ opened ]
   workflow_dispatch:
 env:
   GCP_PROJECT: latestbit
@@ -37,8 +37,8 @@ jobs:
       - name: 'Checking formatting and clippy'
         run: cargo fmt -- --check && cargo clippy -- -Dwarnings
       - name: 'Run lib tests'
-        run: cargo test --lib --all-features
+        run: cargo test --lib --features "caching-memory,caching-persistent"
         if: github.ref != 'refs/heads/master'
       - name: 'Run all tests'
-        run: cargo test --all-features
+        run: cargo test --features "caching-memory,caching-persistent"
         if: github.ref == 'refs/heads/master'
diff --git a/Cargo.toml b/Cargo.toml
index aa95851..798643f 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -22,19 +22,21 @@ name = "firestore"
 path = "src/lib.rs"
 
 [features]
-default = []
+default = ["tls-roots"]
 caching = []
 caching-memory = ["caching", "dep:moka"]
 caching-persistent = ["caching", "dep:redb"]
+tls-roots = ["gcloud-sdk/tls-roots"]
+tls-webpki-roots = ["gcloud-sdk/tls-webpki-roots"]
 
 [dependencies]
 tracing = "0.1"
-gcloud-sdk = { version = "0.24.5", features = ["google-firestore-v1"] }
-hyper = { version = "0.14" }
+gcloud-sdk = { version = "0.25", default-features = false, features = ["google-firestore-v1"] }
+hyper = { version = "1" }
 struct-path = "0.2"
 rvstruct = "0.3.2"
 rsb_derive = "0.5"
-serde = { version = "1.0", features = ["derive"] }
+serde = { version = "1", features = ["derive"] }
 tokio = { version = "1" }
 tokio-stream = "0.1"
 futures = "0.3"
diff --git a/README.md b/README.md
index b0e3ffc..92dcd5f 100644
--- a/README.md
+++ b/README.md
@@ -37,7 +37,7 @@ Cargo.toml:
 
 ```toml
 [dependencies]
-firestore = "0.42"
+firestore = "0.43"
 ```
 
 ## Examples
@@ -177,8 +177,8 @@ let object_stream: BoxStream<FirestoreResult<MyTestStructure>> = db.fluent()
     .from(TEST_COLLECTION_NAME)
     .filter( | q| { // Fluent filter API example
         q.for_all([
-            q.field(path! (MyTestStructure::some_num)).is_not_null(),
-            q.field(path ! (MyTestStructure::some_string)).eq("Test"),
+            q.field(path!(MyTestStructure::some_num)).is_not_null(),
+            q.field(path!(MyTestStructure::some_string)).eq("Test"),
             // Sometimes you have optional filters
             Some("Test2")
                 .and_then( | value | q.field(path ! (MyTestStructure::one_more_string)).eq(value)),
@@ -272,21 +272,21 @@ You can work with nested collections specifying path/location to a parent for do
 db.fluent()
     .insert()
     .into(TEST_PARENT_COLLECTION_NAME)
-    .document_id( & parent_struct.some_id)
-    .object( & parent_struct)
+    .document_id(&parent_struct.some_id)
+    .object(&parent_struct)
     .execute()
     .await?;
 
 // The doc path where we store our children
-let parent_path = db.parent_path(TEST_PARENT_COLLECTION_NAME, parent_struct.some_id) ?;
+let parent_path = db.parent_path(TEST_PARENT_COLLECTION_NAME, parent_struct.some_id)?;
 
 // Create a child doc
 db.fluent()
     .insert()
     .into(TEST_CHILD_COLLECTION_NAME)
-    .document_id( & child_struct.some_id)
-    .parent( & parent_path)
-    .object( & child_struct)
+    .document_id(&child_struct.some_id)
+    .parent(&parent_path)
+    .object(&child_struct)
     .execute()
     .await?;
 
@@ -309,9 +309,9 @@ You can nest multiple levels of collections using `at()`:
 
 ```rust
 let parent_path =
-db.parent_path(TEST_PARENT_COLLECTION_NAME, "parent-id") ?
-    .at(TEST_CHILD_COLLECTION_NAME, "child-id") ?
-    .at(TEST_GRANDCHILD_COLLECTION_NAME, "grand-child-id") ?;
+db.parent_path(TEST_PARENT_COLLECTION_NAME, "parent-id")?
+    .at(TEST_CHILD_COLLECTION_NAME, "child-id")?
+    .at(TEST_GRANDCHILD_COLLECTION_NAME, "grand-child-id")?;
 ```
 
 ## Transactions
@@ -355,7 +355,7 @@ You may also execute transactions that automatically retry with exponential back
         .by_id_in(TEST_COLLECTION_NAME)
         .obj()
         .one(TEST_DOCUMENT_ID)
-        .await ?
+        .await?
         .expect("Missing document");
 
     // Perform some kind of operation that depends on the state of the document
@@ -363,12 +363,12 @@ You may also execute transactions that automatically retry with exponential back
 
     db.fluent()
         .update()
-        .fields(paths ! (MyTestStructure::{
+        .fields(paths!(MyTestStructure::{
             test_string
         }))
         .in_col(TEST_COLLECTION_NAME)
         .document_id(TEST_DOCUMENT_ID)
-        .object( & test_structure)
+        .object(&test_structure)
         .add_to_transaction(transaction) ?;
 
     Ok(())
@@ -455,11 +455,11 @@ db.fluent()
 .update()
 .in_col(TEST_COLLECTION_NAME)
 .document_id("test-4")
-.transforms( | t| { // Transformations
+.transforms(|t| { // Transformations
     t.fields([
-        t.field(path! (MyTestStructure::some_num)).increment(10),
-        t.field(path ! (MyTestStructure::some_array)).append_missing_elements([4, 5]),
-        t.field(path! (MyTestStructure::some_array)).remove_all_from_array([3]),
+        t.field(path!(MyTestStructure::some_num)).increment(10),
+        t.field(path!(MyTestStructure::some_array)).append_missing_elements([4, 5]),
+        t.field(path!(MyTestStructure::some_array)).remove_all_from_array([3]),
     ])
 })
 .only_transform()
@@ -470,13 +470,13 @@ db.fluent()
 .update()
 .in_col(TEST_COLLECTION_NAME)
 .document_id("test-5")
-.object( & my_obj) // Updating the objects with the fields here
-.transforms( | t| { // Transformations after the update
+.object(&my_obj) // Updating the objects with the fields here
+.transforms(|t| { // Transformations after the update
     t.fields([
-        t.field(path! (MyTestStructure::some_num)).increment(10),
+        t.field(path!(MyTestStructure::some_num)).increment(10),
     ])
 })
-.add_to_transaction( & mut transaction) ?; // or add_to_batch
+.add_to_transaction(&mut transaction) ?; // or add_to_batch
 ```
 
 ## Listening the document changes on Firestore
@@ -505,14 +505,14 @@ db.fluent()
 .select()
 .from(TEST_COLLECTION_NAME)
 .listen()
-.add_target(TEST_TARGET_ID_BY_QUERY, & mut listener) ?;
+.add_target(TEST_TARGET_ID_BY_QUERY, &mut listener) ?;
 
 // Adding docs listener by IDs
 db.fluent()
 .select()
 .by_id_in(TEST_COLLECTION_NAME)
 .batch_listen([doc_id1, doc_id2])
-.add_target(TEST_TARGET_ID_BY_DOC_IDS, & mut listener) ?;
+.add_target(TEST_TARGET_ID_BY_DOC_IDS, &mut listener) ?;
 
 listener
     .start( | event| async move {
@@ -577,7 +577,7 @@ The library supports the aggregation functions for the queries:
 db.fluent()
     .select()
     .from(TEST_COLLECTION_NAME)
-    .aggregate( | a| a.fields([a.field(path!(MyAggTestStructure::counter)).count()]))
+    .aggregate(|a| a.fields([a.field(path!(MyAggTestStructure::counter)).count()]))
     .obj()
     .query()
     .await?;
@@ -710,24 +710,24 @@ Update cache is done in the following cases:
 
 ```rust
 // Create an instance
-let db = FirestoreDb::new( & config_env_var("PROJECT_ID") ? ).await?;
+let db = FirestoreDb::new( &config_env_var("PROJECT_ID") ? ).await?;
 
 const TEST_COLLECTION_NAME: &'static str = "test-caching";
 
 // Create a cache instance that also creates an internal Firestore listener
 let mut cache = FirestoreCache::new(
 "example-mem-cache".into(),
-& db,
+&db,
 FirestoreMemoryCacheBackend::new(
     FirestoreCacheConfiguration::new().add_collection_config(
-        & db,
+        &db,
         FirestoreCacheCollectionConfiguration::new(
             TEST_COLLECTION_NAME,
             FirestoreListenerTarget::new(1000),
             FirestoreCacheCollectionLoadMode::PreloadNone,
         )
     ),
-) ?,
+)?,
 FirestoreMemListenStateStorage::new(),
 )
 .await?;
@@ -736,28 +736,33 @@
 cache.load().await?; // Required even if you don't preload anything
 
 // Read a document through the cache. If it is not found in the cache, it will be loaded from Firestore and cached.
-let my_struct0: Option<MyTestStructure> = db.read_through_cache( & cache)
-.fluent()
-.select()
-.by_id_in(TEST_COLLECTION_NAME)
-.obj()
-.one("test-1")
-.await?;
+let my_struct0: Option<MyTestStructure> = db.read_through_cache(&cache)
+    .fluent()
+    .select()
+    .by_id_in(TEST_COLLECTION_NAME)
+    .obj()
+    .one("test-1")
+    .await?;
 
 // Read a document only from the cache. If it is not found in the cache, it will return None.
-let my_struct0: Option<MyTestStructure> = db.read_cached_only( & cache)
-.fluent()
-.select()
-.by_id_in(TEST_COLLECTION_NAME)
-.obj()
-.one("test-1")
-.await?;
+let my_struct0: Option<MyTestStructure> = db.read_cached_only(&cache)
+    .fluent()
+    .select()
+    .by_id_in(TEST_COLLECTION_NAME)
+    .obj()
+    .one("test-1")
+    .await?;
 ```
 
 Full examples available [here](examples/caching_memory_collections.rs) and [here](examples/caching_persistent_collections.rs).
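Both cache backends shown above are optional Cargo features: `caching-memory` (backed by `moka`) and `caching-persistent` (backed by `redb`), as declared in the Cargo.toml diff of this patch and exercised by the updated CI commands. A minimal, hypothetical sketch of a downstream `Cargo.toml` enabling them, assuming the 0.43 release that corresponds to this patch:

```toml
[dependencies]
# Hypothetical downstream configuration: enable the optional cache backends.
# "caching-memory" pulls in moka; "caching-persistent" pulls in redb.
firestore = { version = "0.43", features = ["caching-memory", "caching-persistent"] }
```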
+## TLS related features
+Cargo provides support for different TLS features for dependencies:
+- `tls-roots`: default feature to support native TLS roots
+- `tls-webpki-roots`: feature to switch to webpki crate roots
+
 ## How this library is tested
 
 There are integration tests in the tests directory that runs for every commit against the real
diff --git a/src/firestore_serde/deserializer.rs b/src/firestore_serde/deserializer.rs
index 9c32903..0193dd5 100644
--- a/src/firestore_serde/deserializer.rs
+++ b/src/firestore_serde/deserializer.rs
@@ -651,7 +651,7 @@ where
         fields.insert(
             "_firestore_created".to_string(),
             gcloud_sdk::google::firestore::v1::Value {
-                value_type: Some(value::ValueType::TimestampValue(created_time.clone())),
+                value_type: Some(value::ValueType::TimestampValue(*created_time)),
             },
         );
     }
@@ -660,7 +660,7 @@ where
         fields.insert(
             "_firestore_updated".to_string(),
             gcloud_sdk::google::firestore::v1::Value {
-                value_type: Some(value::ValueType::TimestampValue(updated_time.clone())),
+                value_type: Some(value::ValueType::TimestampValue(*updated_time)),
             },
         );
     }
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
index 9578a63..16c6fe6 100644
--- a/tests/common/mod.rs
+++ b/tests/common/mod.rs
@@ -15,7 +15,8 @@ pub fn config_env_var(name: &str) -> Result<String, String> {
 #[allow(dead_code)]
 pub async fn setup() -> Result<FirestoreDb, Box<dyn std::error::Error + Send + Sync>> {
     // Logging with debug enabled
-    let filter = tracing_subscriber::EnvFilter::builder().parse("info,firestore=debug")?;
+    let filter =
+        tracing_subscriber::EnvFilter::builder().parse("info,firestore=debug,gcloud_sdk=debug")?;
     let subscriber = tracing_subscriber::fmt().with_env_filter(filter).finish();
     tracing::subscriber::set_global_default(subscriber)?;
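
The `tls-roots` and `tls-webpki-roots` features added in this patch simply forward to the corresponding `gcloud-sdk` features, with `tls-roots` enabled by default (`default = ["tls-roots"]` in the Cargo.toml diff). A minimal, hypothetical sketch of a downstream crate switching to webpki roots, assuming the 0.43 release of this crate:

```toml
[dependencies]
# Hypothetical downstream configuration: opt out of the default native TLS roots
# ("tls-roots") and use webpki roots instead ("tls-webpki-roots" -> gcloud-sdk/tls-webpki-roots).
firestore = { version = "0.43", default-features = false, features = ["tls-webpki-roots"] }
```

Disabling default features here only drops `tls-roots`, since that is the only default feature declared in this patch.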