Get rid of lombok for langchain4j-azure-open-ai-spring-boot-starter #51

Merged
7 changes: 0 additions & 7 deletions langchain4j-azure-open-ai-spring-boot-starter/pom.xml
@@ -34,13 +34,6 @@
             <optional>true</optional>
         </dependency>
 
-        <!-- should be listed before spring-boot-configuration-processor -->
-        <dependency>
-            <groupId>org.projectlombok</groupId>
-            <artifactId>lombok</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
         <!-- needed to generate automatic metadata about available config properties -->
         <dependency>
             <groupId>org.springframework.boot</groupId>
@@ -30,30 +30,30 @@ AzureOpenAiChatModel openAiChatModelByNonAzureApiKey(Properties properties) {
     }
 
     AzureOpenAiChatModel openAiChatModel(Properties properties) {
-        ChatModelProperties chatModelProperties = properties.getChatModel();
+        ChatModelProperties chatModelProperties = properties.chatModel();
         AzureOpenAiChatModel.Builder builder = AzureOpenAiChatModel.builder()
-                .endpoint(chatModelProperties.getEndpoint())
-                .serviceVersion(chatModelProperties.getServiceVersion())
-                .apiKey(chatModelProperties.getApiKey())
-                .deploymentName(chatModelProperties.getDeploymentName())
+                .endpoint(chatModelProperties.endpoint())
+                .serviceVersion(chatModelProperties.serviceVersion())
+                .apiKey(chatModelProperties.apiKey())
+                .deploymentName(chatModelProperties.deploymentName())
                 // TODO inject tokenizer?
-                .maxTokens(chatModelProperties.getMaxTokens())
-                .temperature(chatModelProperties.getTemperature())
-                .topP(chatModelProperties.getTopP())
-                .logitBias(chatModelProperties.getLogitBias())
-                .user(chatModelProperties.getUser())
-                .stop(chatModelProperties.getStop())
-                .presencePenalty(chatModelProperties.getPresencePenalty())
-                .frequencyPenalty(chatModelProperties.getFrequencyPenalty())
-                .seed(chatModelProperties.getSeed())
-                .timeout(Duration.ofSeconds(chatModelProperties.getTimeout() == null ? 0 : chatModelProperties.getTimeout()))
-                .maxRetries(chatModelProperties.getMaxRetries())
+                .maxTokens(chatModelProperties.maxTokens())
+                .temperature(chatModelProperties.temperature())
+                .topP(chatModelProperties.topP())
+                .logitBias(chatModelProperties.logitBias())
+                .user(chatModelProperties.user())
+                .stop(chatModelProperties.stop())
+                .presencePenalty(chatModelProperties.presencePenalty())
+                .frequencyPenalty(chatModelProperties.frequencyPenalty())
+                .seed(chatModelProperties.seed())
+                .timeout(Duration.ofSeconds(chatModelProperties.timeout() == null ? 0 : chatModelProperties.timeout()))
+                .maxRetries(chatModelProperties.maxRetries())
                 .proxyOptions(ProxyOptions.fromConfiguration(Configuration.getGlobalConfiguration()))
-                .logRequestsAndResponses(chatModelProperties.getLogRequestsAndResponses() != null && chatModelProperties.getLogRequestsAndResponses())
-                .userAgentSuffix(chatModelProperties.getUserAgentSuffix())
-                .customHeaders(chatModelProperties.getCustomHeaders());
-        if (chatModelProperties.getNonAzureApiKey() != null) {
-            builder.nonAzureApiKey(chatModelProperties.getNonAzureApiKey());
+                .logRequestsAndResponses(chatModelProperties.logRequestsAndResponses() != null && chatModelProperties.logRequestsAndResponses())
+                .userAgentSuffix(chatModelProperties.userAgentSuffix())
+                .customHeaders(chatModelProperties.customHeaders());
+        if (chatModelProperties.nonAzureApiKey() != null) {
+            builder.nonAzureApiKey(chatModelProperties.nonAzureApiKey());
         }
         return builder.build();
     }
@@ -71,30 +71,30 @@ AzureOpenAiStreamingChatModel openAiStreamingChatModelByNonAzureApiKey(Properties properties) {
     }
 
     AzureOpenAiStreamingChatModel openAiStreamingChatModel(Properties properties) {
-        ChatModelProperties chatModelProperties = properties.getStreamingChatModel();
+        ChatModelProperties chatModelProperties = properties.streamingChatModel();
         AzureOpenAiStreamingChatModel.Builder builder = AzureOpenAiStreamingChatModel.builder()
-                .endpoint(chatModelProperties.getEndpoint())
-                .serviceVersion(chatModelProperties.getServiceVersion())
-                .apiKey(chatModelProperties.getApiKey())
-                .deploymentName(chatModelProperties.getDeploymentName())
+                .endpoint(chatModelProperties.endpoint())
+                .serviceVersion(chatModelProperties.serviceVersion())
+                .apiKey(chatModelProperties.apiKey())
+                .deploymentName(chatModelProperties.deploymentName())
                 // TODO inject tokenizer?
-                .maxTokens(chatModelProperties.getMaxTokens())
-                .temperature(chatModelProperties.getTemperature())
-                .topP(chatModelProperties.getTopP())
-                .logitBias(chatModelProperties.getLogitBias())
-                .user(chatModelProperties.getUser())
-                .stop(chatModelProperties.getStop())
-                .presencePenalty(chatModelProperties.getPresencePenalty())
-                .frequencyPenalty(chatModelProperties.getFrequencyPenalty())
-                .seed(chatModelProperties.getSeed())
-                .timeout(Duration.ofSeconds(chatModelProperties.getTimeout() == null ? 0 : chatModelProperties.getTimeout()))
-                .maxRetries(chatModelProperties.getMaxRetries())
+                .maxTokens(chatModelProperties.maxTokens())
+                .temperature(chatModelProperties.temperature())
+                .topP(chatModelProperties.topP())
+                .logitBias(chatModelProperties.logitBias())
+                .user(chatModelProperties.user())
+                .stop(chatModelProperties.stop())
+                .presencePenalty(chatModelProperties.presencePenalty())
+                .frequencyPenalty(chatModelProperties.frequencyPenalty())
+                .seed(chatModelProperties.seed())
+                .timeout(Duration.ofSeconds(chatModelProperties.timeout() == null ? 0 : chatModelProperties.timeout()))
+                .maxRetries(chatModelProperties.maxRetries())
                 .proxyOptions(ProxyOptions.fromConfiguration(Configuration.getGlobalConfiguration()))
-                .logRequestsAndResponses(chatModelProperties.getLogRequestsAndResponses() != null && chatModelProperties.getLogRequestsAndResponses())
-                .userAgentSuffix(chatModelProperties.getUserAgentSuffix())
-                .customHeaders(chatModelProperties.getCustomHeaders());
-        if (chatModelProperties.getNonAzureApiKey() != null) {
-            builder.nonAzureApiKey(chatModelProperties.getNonAzureApiKey());
+                .logRequestsAndResponses(chatModelProperties.logRequestsAndResponses() != null && chatModelProperties.logRequestsAndResponses())
+                .userAgentSuffix(chatModelProperties.userAgentSuffix())
+                .customHeaders(chatModelProperties.customHeaders());
+        if (chatModelProperties.nonAzureApiKey() != null) {
+            builder.nonAzureApiKey(chatModelProperties.nonAzureApiKey());
         }
         return builder.build();
     }
@@ -112,22 +112,22 @@ AzureOpenAiEmbeddingModel openAiEmbeddingModelByNonAzureApiKey(Properties properties) {
     }
 
     AzureOpenAiEmbeddingModel openAiEmbeddingModel(Properties properties, Tokenizer tokenizer) {
-        EmbeddingModelProperties embeddingModelProperties = properties.getEmbeddingModel();
+        EmbeddingModelProperties embeddingModelProperties = properties.embeddingModel();
         AzureOpenAiEmbeddingModel.Builder builder = AzureOpenAiEmbeddingModel.builder()
-                .endpoint(embeddingModelProperties.getEndpoint())
-                .serviceVersion(embeddingModelProperties.getServiceVersion())
-                .apiKey(embeddingModelProperties.getApiKey())
-                .deploymentName(embeddingModelProperties.getDeploymentName())
+                .endpoint(embeddingModelProperties.endpoint())
+                .serviceVersion(embeddingModelProperties.serviceVersion())
+                .apiKey(embeddingModelProperties.apiKey())
+                .deploymentName(embeddingModelProperties.deploymentName())
                 .tokenizer(tokenizer)
-                .timeout(Duration.ofSeconds(embeddingModelProperties.getTimeout() == null ? 0 : embeddingModelProperties.getTimeout()))
-                .maxRetries(embeddingModelProperties.getMaxRetries())
+                .timeout(Duration.ofSeconds(embeddingModelProperties.timeout() == null ? 0 : embeddingModelProperties.timeout()))
+                .maxRetries(embeddingModelProperties.maxRetries())
                 .proxyOptions(ProxyOptions.fromConfiguration(Configuration.getGlobalConfiguration()))
-                .logRequestsAndResponses(embeddingModelProperties.getLogRequestsAndResponses() != null && embeddingModelProperties.getLogRequestsAndResponses())
-                .userAgentSuffix(embeddingModelProperties.getUserAgentSuffix())
-                .dimensions(embeddingModelProperties.getDimensions())
-                .customHeaders(embeddingModelProperties.getCustomHeaders());
-        if (embeddingModelProperties.getNonAzureApiKey() != null) {
-            builder.nonAzureApiKey(embeddingModelProperties.getNonAzureApiKey());
+                .logRequestsAndResponses(embeddingModelProperties.logRequestsAndResponses() != null && embeddingModelProperties.logRequestsAndResponses())
+                .userAgentSuffix(embeddingModelProperties.userAgentSuffix())
+                .dimensions(embeddingModelProperties.dimensions())
+                .customHeaders(embeddingModelProperties.customHeaders());
+        if (embeddingModelProperties.nonAzureApiKey() != null) {
+            builder.nonAzureApiKey(embeddingModelProperties.nonAzureApiKey());
         }
         return builder.build();
     }
@@ -145,25 +145,25 @@ AzureOpenAiImageModel openAiImageModelByNonAzureApiKey(Properties properties) {
     }
 
    AzureOpenAiImageModel openAiImageModel(Properties properties) {
-        ImageModelProperties imageModelProperties = properties.getImageModel();
+        ImageModelProperties imageModelProperties = properties.imageModel();
        AzureOpenAiImageModel.Builder builder = AzureOpenAiImageModel.builder()
-                .endpoint(imageModelProperties.getEndpoint())
-                .serviceVersion(imageModelProperties.getServiceVersion())
-                .apiKey(imageModelProperties.getApiKey())
-                .deploymentName(imageModelProperties.getDeploymentName())
-                .quality(imageModelProperties.getQuality())
-                .size(imageModelProperties.getSize())
-                .user(imageModelProperties.getUser())
-                .style(imageModelProperties.getStyle())
-                .responseFormat(imageModelProperties.getResponseFormat())
-                .timeout(imageModelProperties.getTimeout() == null ? null : Duration.ofSeconds(imageModelProperties.getTimeout()))
-                .maxRetries(imageModelProperties.getMaxRetries())
+                .endpoint(imageModelProperties.endpoint())
+                .serviceVersion(imageModelProperties.serviceVersion())
+                .apiKey(imageModelProperties.apiKey())
+                .deploymentName(imageModelProperties.deploymentName())
+                .quality(imageModelProperties.quality())
+                .size(imageModelProperties.size())
+                .user(imageModelProperties.user())
+                .style(imageModelProperties.style())
+                .responseFormat(imageModelProperties.responseFormat())
+                .timeout(imageModelProperties.timeout() == null ? null : Duration.ofSeconds(imageModelProperties.timeout()))
+                .maxRetries(imageModelProperties.maxRetries())
                 .proxyOptions(ProxyOptions.fromConfiguration(Configuration.getGlobalConfiguration()))
-                .logRequestsAndResponses(imageModelProperties.getLogRequestsAndResponses() != null && imageModelProperties.getLogRequestsAndResponses())
-                .userAgentSuffix(imageModelProperties.getUserAgentSuffix())
-                .customHeaders(imageModelProperties.getCustomHeaders());
-        if (imageModelProperties.getNonAzureApiKey() != null) {
-            builder.nonAzureApiKey(imageModelProperties.getNonAzureApiKey());
+                .logRequestsAndResponses(imageModelProperties.logRequestsAndResponses() != null && imageModelProperties.logRequestsAndResponses())
+                .userAgentSuffix(imageModelProperties.userAgentSuffix())
+                .customHeaders(imageModelProperties.customHeaders());
+        if (imageModelProperties.nonAzureApiKey() != null) {
+            builder.nonAzureApiKey(imageModelProperties.nonAzureApiKey());
         }
         return builder.build();
     }
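All of the builder changes above are mechanical: the properties classes in the diffs below are now Java records, and a record's generated accessor is named after the component itself (endpoint()) rather than the JavaBean-style getter (getEndpoint()) that Lombok's @Getter produced. A minimal sketch of that difference (the Example and ExampleUsage names are made up for illustration, not part of this PR):

// Not part of this PR: a record exposes component accessors, not get-prefixed getters.
record Example(String endpoint, Integer maxRetries) {
}

class ExampleUsage {

    static String describe(Example example) {
        // record accessors replace the Lombok-generated getters used before this change
        return example.endpoint() + " (maxRetries=" + example.maxRetries() + ")";
    }
}

Because records are implicitly final and immutable, the setters Lombok generated are also no longer needed; Spring Boot binds the values through the record constructor instead, as the Properties diff further down shows.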
@@ -1,33 +1,29 @@
 package dev.langchain4j.azure.openai.spring;
 
-import lombok.Getter;
-import lombok.Setter;
-
 import java.util.List;
 import java.util.Map;
 
-@Getter
-@Setter
-class ChatModelProperties {
+record ChatModelProperties(
 
-    String endpoint;
-    String serviceVersion;
-    String apiKey;
-    String deploymentName;
-    Integer maxTokens;
-    Double temperature;
-    Double topP;
-    Map<String, Integer> logitBias;
-    String user;
-    List<String> stop;
-    Double presencePenalty;
-    Double frequencyPenalty;
-    Long seed;
-    String responseFormat;
-    Integer timeout; // TODO use Duration instead
-    Integer maxRetries;
-    Boolean logRequestsAndResponses;
-    String userAgentSuffix;
-    Map<String, String> customHeaders;
-    String nonAzureApiKey;
+        String endpoint,
+        String serviceVersion,
+        String apiKey,
+        String deploymentName,
+        Integer maxTokens,
+        Double temperature,
+        Double topP,
+        Map<String, Integer> logitBias,
+        String user,
+        List<String> stop,
+        Double presencePenalty,
+        Double frequencyPenalty,
+        Long seed,
+        String responseFormat,
+        Integer timeout, // TODO use Duration instead
+        Integer maxRetries,
+        Boolean logRequestsAndResponses,
+        String userAgentSuffix,
+        Map<String, String> customHeaders,
+        String nonAzureApiKey
+) {
 }
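The "TODO use Duration instead" comment carries over from the old class. A hedged sketch of what that follow-up could look like (TimeoutExample is a made-up name, and the duration binding described is standard Spring Boot conversion, not something introduced by this PR):

import java.time.Duration;

// Illustration only: declaring the component as java.time.Duration would let Spring Boot
// bind values such as "30s" or "PT30S" directly, removing the manual seconds-to-Duration
// wrapping currently done in the auto-configuration.
record TimeoutExample(Duration timeout) {

    Duration timeoutOrDefault() {
        // mirrors the existing "timeout == null ? 0 : timeout" fallback
        return timeout == null ? Duration.ZERO : timeout;
    }
}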
@@ -1,23 +1,19 @@
 package dev.langchain4j.azure.openai.spring;
 
-import lombok.Getter;
-import lombok.Setter;
-
 import java.util.Map;
 
-@Getter
-@Setter
-class EmbeddingModelProperties {
+record EmbeddingModelProperties(
 
-    String endpoint;
-    String serviceVersion;
-    String apiKey;
-    String deploymentName;
-    Integer timeout; // TODO use duration instead
-    Integer maxRetries;
-    Boolean logRequestsAndResponses;
-    String userAgentSuffix;
-    Integer dimensions;
-    Map<String, String> customHeaders;
-    String nonAzureApiKey;
+        String endpoint,
+        String serviceVersion,
+        String apiKey,
+        String deploymentName,
+        Integer timeout, // TODO use duration instead
+        Integer maxRetries,
+        Boolean logRequestsAndResponses,
+        String userAgentSuffix,
+        Integer dimensions,
+        Map<String, String> customHeaders,
+        String nonAzureApiKey
+) {
 }
@@ -1,27 +1,23 @@
 package dev.langchain4j.azure.openai.spring;
 
-import lombok.Getter;
-import lombok.Setter;
-
 import java.util.Map;
 
-@Getter
-@Setter
-class ImageModelProperties {
+record ImageModelProperties(
 
-    String endpoint;
-    String serviceVersion;
-    String apiKey;
-    String deploymentName;
-    String quality;
-    String size;
-    String user;
-    String style;
-    String responseFormat;
-    Integer timeout;
-    Integer maxRetries;
-    Boolean logRequestsAndResponses;
-    String userAgentSuffix;
-    Map<String, String> customHeaders;
-    String nonAzureApiKey;
+        String endpoint,
+        String serviceVersion,
+        String apiKey,
+        String deploymentName,
+        String quality,
+        String size,
+        String user,
+        String style,
+        String responseFormat,
+        Integer timeout,
+        Integer maxRetries,
+        Boolean logRequestsAndResponses,
+        String userAgentSuffix,
+        Map<String, String> customHeaders,
+        String nonAzureApiKey
+){
 }
@@ -1,26 +1,22 @@
 package dev.langchain4j.azure.openai.spring;
 
-import lombok.Getter;
-import lombok.Setter;
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.boot.context.properties.NestedConfigurationProperty;
 
-@Getter
-@Setter
 @ConfigurationProperties(prefix = Properties.PREFIX)
-public class Properties {
-
-    static final String PREFIX = "langchain4j.azure-open-ai";
+public record Properties(
 
     @NestedConfigurationProperty
-    ChatModelProperties chatModel;
+    ChatModelProperties chatModel,
 
     @NestedConfigurationProperty
-    ChatModelProperties streamingChatModel;
+    ChatModelProperties streamingChatModel,
 
     @NestedConfigurationProperty
-    EmbeddingModelProperties embeddingModel;
+    EmbeddingModelProperties embeddingModel,
 
     @NestedConfigurationProperty
-    ImageModelProperties imageModel;
+    ImageModelProperties imageModel
+) {
+    static final String PREFIX = "langchain4j.azure-open-ai";
 }
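With @Getter/@Setter gone, Spring Boot populates this record through its canonical constructor. A minimal binding sketch, assuming it lives in the same package as Properties and uses spring-boot-test's ApplicationContextRunner (PropertiesBindingSketch and TestConfig are made-up names, and the property values are placeholders, not from this PR):

package dev.langchain4j.azure.openai.spring;

import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.annotation.Configuration;

// Sketch only: shows that values under the "langchain4j.azure-open-ai" prefix are handed
// straight to the record constructor, so no setters are required after the Lombok removal.
class PropertiesBindingSketch {

    @Configuration(proxyBeanMethods = false)
    @EnableConfigurationProperties(Properties.class)
    static class TestConfig {
    }

    public static void main(String[] args) {
        new ApplicationContextRunner()
                .withUserConfiguration(TestConfig.class)
                .withPropertyValues(
                        "langchain4j.azure-open-ai.chat-model.endpoint=https://example.openai.azure.com/",
                        "langchain4j.azure-open-ai.chat-model.max-retries=2")
                .run(context -> {
                    Properties properties = context.getBean(Properties.class);
                    // record accessors instead of Lombok getters
                    System.out.println(properties.chatModel().endpoint());
                    System.out.println(properties.chatModel().maxRetries());
                });
    }
}

Relaxed binding still maps chat-model.* keys onto the chatModel component, so existing configuration keys keep working after this change.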