From 5e2d0ad2f486b865c6137c9785de38b26ae889d2 Mon Sep 17 00:00:00 2001 From: erhant Date: Mon, 4 Nov 2024 10:59:21 +0300 Subject: [PATCH 1/3] `expect` handlings --- src/api_interface/openai_api.rs | 15 +++++++-------- src/memory/files.rs | 2 +- src/program/errors.rs | 4 ++++ src/tools/browserless.rs | 3 +-- src/tools/serper.rs | 2 +- 5 files changed, 14 insertions(+), 12 deletions(-) diff --git a/src/api_interface/openai_api.rs b/src/api_interface/openai_api.rs index ca2b23a..ea10f59 100644 --- a/src/api_interface/openai_api.rs +++ b/src/api_interface/openai_api.rs @@ -42,21 +42,20 @@ impl OpenAIExecutor { let messages = vec![ChatMessageBuilder::default() .content(ChatMessageContent::Text(prompt.to_string())) .build() - .expect("OpenAI function call message build error")]; + .map_err(|e| OllamaError::from(format!("Could not build chat message: {:?}", e)))?]; let parameters = ChatCompletionParametersBuilder::default() .model(self.model.clone()) .messages(messages) .tools(openai_tools) .build() - .expect("Error while building tools."); + .map_err(|e| { + OllamaError::from(format!("Could not build message parameters: {:?}", e)) + })?; - let result = self - .client - .chat() - .create(parameters) - .await - .expect("OpenAI Function call failed"); + let result = self.client.chat().create(parameters).await.map_err(|e| { + OllamaError::from(format!("Failed to parse Gemini API response: {:?}", e)) + })?; let message = result.choices[0].message.clone(); if raw_mode { diff --git a/src/memory/files.rs b/src/memory/files.rs index a366833..85f0e8e 100644 --- a/src/memory/files.rs +++ b/src/memory/files.rs @@ -105,7 +105,7 @@ impl Embedder for OpenAIEmbedder { .input(EmbeddingInput::String(_prompt.to_string())) .encoding_format(EmbeddingEncodingFormat::Float) .build() - .expect("Error building OpenAI embedder"); + .map_err(|e| EmbeddingError::BuilderError(e))?; let result = client.embeddings().create(parameters).await; diff --git a/src/program/errors.rs b/src/program/errors.rs index 751d468..0d563e4 100644 --- a/src/program/errors.rs +++ b/src/program/errors.rs @@ -1,6 +1,8 @@ use std::error::Error; use std::fmt; +use openai_dive::v1::resources::embedding::EmbeddingParametersBuilderError; + #[allow(clippy::enum_variant_names)] #[derive(Debug)] pub enum CustomError { @@ -25,6 +27,7 @@ pub enum EmbeddingError { DocumentEmbedding(String), QueryEmbedding(String), ModelDoesNotExist, + BuilderError(EmbeddingParametersBuilderError), } #[derive(Debug)] @@ -78,6 +81,7 @@ impl fmt::Display for EmbeddingError { EmbeddingError::DocumentEmbedding(doc) => write!(f, "Error while generating embeddings for doc: {}", doc), EmbeddingError::QueryEmbedding(query) => write!(f, "Error while generating embeddings for query: {}", query), EmbeddingError::ModelDoesNotExist => write!(f, "Model does not exist. 
run ollama run hellord/mxbai-embed-large-v1:f16 to create it."), + EmbeddingError::BuilderError(err) => write!(f, "Embedding builder error: {}", err), } } } diff --git a/src/tools/browserless.rs b/src/tools/browserless.rs index 9f9b0bb..0dede53 100644 --- a/src/tools/browserless.rs +++ b/src/tools/browserless.rs @@ -35,8 +35,7 @@ impl Tool for Browserless { async fn run(&self, input: Value) -> Result> { let website = input["website"].as_str().ok_or("Website URL is required")?; - let browserless_token = - env::var("BROWSERLESS_TOKEN").expect("BROWSERLESS_TOKEN must be set"); + let browserless_token = env::var("BROWSERLESS_TOKEN")?; let url = format!("http://0.0.0.0:3000/content?token={}", browserless_token); let payload = json!({ "url": website diff --git a/src/tools/serper.rs b/src/tools/serper.rs index 5d0f25c..221a304 100644 --- a/src/tools/serper.rs +++ b/src/tools/serper.rs @@ -254,7 +254,7 @@ impl Tool for SearchTool { } let client = Client::new(); - let api_key = env::var("SERPER_API_KEY").expect("SERPER_API_KEY must be set"); + let api_key = env::var("SERPER_API_KEY")?; let response = client .post(&url) .header("X-API-KEY", api_key) From 06965374e40e4a72804bf2892889e9461a87534d Mon Sep 17 00:00:00 2001 From: erhant Date: Mon, 4 Nov 2024 11:02:28 +0300 Subject: [PATCH 2/3] fix lint --- src/memory/files.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/memory/files.rs b/src/memory/files.rs index 85f0e8e..2fd2b89 100644 --- a/src/memory/files.rs +++ b/src/memory/files.rs @@ -105,7 +105,7 @@ impl Embedder for OpenAIEmbedder { .input(EmbeddingInput::String(_prompt.to_string())) .encoding_format(EmbeddingEncodingFormat::Float) .build() - .map_err(|e| EmbeddingError::BuilderError(e))?; + .map_err(EmbeddingError::BuilderError)?; let result = client.embeddings().create(parameters).await; From 32d70eb7ee9527f3835c9e0637a6d32018d4581d Mon Sep 17 00:00:00 2001 From: erhant Date: Mon, 4 Nov 2024 11:04:22 +0300 Subject: [PATCH 3/3] typo fix --- src/api_interface/openai_api.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/api_interface/openai_api.rs b/src/api_interface/openai_api.rs index ea10f59..abb8e76 100644 --- a/src/api_interface/openai_api.rs +++ b/src/api_interface/openai_api.rs @@ -54,7 +54,7 @@ impl OpenAIExecutor { })?; let result = self.client.chat().create(parameters).await.map_err(|e| { - OllamaError::from(format!("Failed to parse Gemini API response: {:?}", e)) + OllamaError::from(format!("Failed to parse OpenAI API response: {:?}", e)) })?; let message = result.choices[0].message.clone();
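
Reviewer note (illustration only, not part of the patches above): every hunk in this series applies the same pattern — a panicking `.expect(...)` is replaced by converting the failure into the crate's own error type with `map_err` and propagating it with `?`. The self-contained sketch below shows that pattern in isolation. `WorkflowError`, `build_widget`, and the `EXAMPLE_TOKEN` environment variable are hypothetical names invented for this sketch; they are not the crate's actual types or configuration.

use std::fmt;

// Hypothetical error type standing in for the crate's own errors
// (OllamaError / EmbeddingError in this series).
#[derive(Debug)]
enum WorkflowError {
    Builder(String),
    MissingEnv(std::env::VarError),
}

impl fmt::Display for WorkflowError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            WorkflowError::Builder(msg) => write!(f, "builder error: {}", msg),
            WorkflowError::MissingEnv(e) => write!(f, "missing environment variable: {}", e),
        }
    }
}

impl std::error::Error for WorkflowError {}

// A `From` impl lets `?` convert std::env::VarError automatically,
// which is the mechanism the bare `env::var("...")?` calls in the
// tools rely on (there via a boxed error or an equivalent conversion).
impl From<std::env::VarError> for WorkflowError {
    fn from(e: std::env::VarError) -> Self {
        WorkflowError::MissingEnv(e)
    }
}

// Hypothetical fallible builder, standing in for something like
// ChatMessageBuilder::build() or EmbeddingParametersBuilder::build().
fn build_widget(input: &str) -> Result<String, String> {
    if input.is_empty() {
        Err("input must not be empty".to_string())
    } else {
        Ok(format!("widget({})", input))
    }
}

fn run(input: &str) -> Result<String, WorkflowError> {
    // Before: build_widget(input).expect("builder failed") would panic.
    // After: map the failure into the caller's error type and propagate with `?`.
    let widget = build_widget(input)
        .map_err(|e| WorkflowError::Builder(format!("could not build widget: {:?}", e)))?;

    // EXAMPLE_TOKEN is a made-up variable name; the `?` works because of the
    // From<VarError> impl above, so a missing variable becomes an Err instead
    // of a panic.
    let token = std::env::var("EXAMPLE_TOKEN")?;

    Ok(format!("{} authorized with a token of length {}", widget, token.len()))
}

fn main() {
    match run("demo") {
        Ok(out) => println!("{}", out),
        Err(e) => eprintln!("error: {}", e),
    }
}

Two details from the series are worth calling out. The closure `|e| EmbeddingError::BuilderError(e)` introduced in PATCH 1/3 is what clippy flags as a redundant closure, hence the point-free `.map_err(EmbeddingError::BuilderError)` in PATCH 2/3; that form applies whenever the enum variant is a one-argument constructor over the source error. And the bare `env::var("BROWSERLESS_TOKEN")?` / `env::var("SERPER_API_KEY")?` calls only compile because those functions' error types can absorb a `std::env::VarError`, which is what the `From` implementation in the sketch stands in for.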