diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Ollama.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Ollama.kt
index 94cb365..fda63b1 100644
--- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Ollama.kt
+++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/Ollama.kt
@@ -1,8 +1,14 @@
 package com.tddworks.ollama.api
 
+import com.tddworks.common.network.api.ktor.api.HttpRequester
+import com.tddworks.common.network.api.ktor.internal.createHttpClient
+import com.tddworks.common.network.api.ktor.internal.default
 import com.tddworks.ollama.api.chat.OllamaChat
+import com.tddworks.ollama.api.chat.internal.DefaultOllamaChatApi
 import com.tddworks.ollama.api.generate.OllamaGenerate
+import com.tddworks.ollama.api.generate.internal.DefaultOllamaGenerateApi
 import com.tddworks.ollama.api.internal.OllamaApi
+import com.tddworks.ollama.api.json.JsonLenient
 
 /**
  * Interface for interacting with the Ollama API.
@@ -13,6 +19,26 @@ interface Ollama : OllamaChat, OllamaGenerate {
         const val BASE_URL = "localhost"
         const val PORT = 11434
         const val PROTOCOL = "http"
+
+        fun create(ollamaConfig: OllamaConfig): Ollama {
+
+            val requester = HttpRequester.default(
+                createHttpClient(
+                    host = ollamaConfig.baseUrl,
+                    port = ollamaConfig.port,
+                    protocol = ollamaConfig.protocol,
+                    json = JsonLenient,
+                )
+            )
+            val ollamaChat = DefaultOllamaChatApi(requester = requester)
+            val ollamaGenerate = DefaultOllamaGenerateApi(requester = requester)
+
+            return OllamaApi(
+                config = ollamaConfig,
+                ollamaChat = ollamaChat,
+                ollamaGenerate = ollamaGenerate
+            )
+        }
     }
 
     /**
@@ -35,16 +61,4 @@ interface Ollama : OllamaChat, OllamaGenerate {
      * @return a string representing the protocol
      */
     fun protocol(): String
-}
-
-fun Ollama(
-    baseUrl: () -> String = { Ollama.BASE_URL },
-    port: () -> Int = { Ollama.PORT },
-    protocol: () -> String = { Ollama.PROTOCOL },
-): Ollama {
-    return OllamaApi(
-        baseUrl = baseUrl(),
-        port = port(),
-        protocol = protocol()
-    )
 }
\ No newline at end of file
diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/internal/OllamaApi.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/internal/OllamaApi.kt
index c3cb7b3..a85707e 100644
--- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/internal/OllamaApi.kt
+++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/api/internal/OllamaApi.kt
@@ -2,49 +2,37 @@ package com.tddworks.ollama.api.internal
 
 import com.tddworks.di.getInstance
 import com.tddworks.ollama.api.Ollama
+import com.tddworks.ollama.api.OllamaConfig
 import com.tddworks.ollama.api.chat.OllamaChat
 import com.tddworks.ollama.api.generate.OllamaGenerate
 
 class OllamaApi(
-    private val baseUrl: String,
-    private val port: Int,
-    private val protocol: String,
-) : Ollama, OllamaChat by getInstance(), OllamaGenerate by getInstance() {
+    private val config: OllamaConfig,
+    private val ollamaChat: OllamaChat,
+    private val ollamaGenerate: OllamaGenerate
+) : Ollama, OllamaChat by ollamaChat, OllamaGenerate by ollamaGenerate {
 
     override fun baseUrl(): String {
-        return baseUrl
+        return config.baseUrl()
     }
 
     override fun port(): Int {
-        return port
+        return config.port()
     }
 
     override fun protocol(): String {
-        return protocol
+        return config.protocol()
     }
-
-}
-
-fun Ollama(
-    baseUrl: () -> String = { Ollama.BASE_URL },
-    port: () -> Int = { Ollama.PORT },
-    protocol: () -> String = { Ollama.PROTOCOL },
-): Ollama {
-    return OllamaApi(
-        baseUrl = baseUrl(),
-        port = port(),
-        protocol = protocol()
-    )
 }
 
 fun Ollama.Companion.create(
-    baseUrl: () -> String = { BASE_URL },
-    port: () -> Int = { PORT },
-    protocol: () -> String = { PROTOCOL },
+    config: OllamaConfig,
+    ollamaChat: OllamaChat = getInstance(),
+    ollamaGenerate: OllamaGenerate = getInstance()
 ): Ollama {
     return OllamaApi(
-        baseUrl = baseUrl(),
-        port = port(),
-        protocol = protocol()
+        config = config,
+        ollamaChat = ollamaChat,
+        ollamaGenerate = ollamaGenerate
     )
 }
\ No newline at end of file
diff --git a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt
index 8f20ae7..235da37 100644
--- a/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt
+++ b/ollama-client/ollama-client-core/src/commonMain/kotlin/com/tddworks/ollama/di/Koin.kt
@@ -29,11 +29,7 @@ fun ollamaModules(
 ) = module {
 
     single {
-        Ollama(
-            baseUrl = config.baseUrl,
-            port = config.port,
-            protocol = config.protocol
-        )
+        Ollama.create(ollamaConfig = config)
     }
 
     single(named("ollamaJson")) { JsonLenient }
diff --git a/openai-gateway/openai-gateway-core/src/commonMain/kotlin/com/tddworks/openai/gateway/api/internal/OllamaOpenAIProvider.kt b/openai-gateway/openai-gateway-core/src/commonMain/kotlin/com/tddworks/openai/gateway/api/internal/OllamaOpenAIProvider.kt
index 9c0a5d4..ed9b7e1 100644
--- a/openai-gateway/openai-gateway-core/src/commonMain/kotlin/com/tddworks/openai/gateway/api/internal/OllamaOpenAIProvider.kt
+++ b/openai-gateway/openai-gateway-core/src/commonMain/kotlin/com/tddworks/openai/gateway/api/internal/OllamaOpenAIProvider.kt
@@ -1,9 +1,9 @@
 package com.tddworks.openai.gateway.api.internal
 
 import com.tddworks.ollama.api.Ollama
+import com.tddworks.ollama.api.OllamaConfig
 import com.tddworks.ollama.api.OllamaModel
 import com.tddworks.ollama.api.chat.api.*
-import com.tddworks.ollama.api.internal.create
 import com.tddworks.openai.api.chat.api.ChatCompletion
 import com.tddworks.openai.api.chat.api.ChatCompletionChunk
 import com.tddworks.openai.api.chat.api.ChatCompletionRequest
@@ -23,9 +23,11 @@ class OllamaOpenAIProvider(
         OpenAIModel(it.value)
     },
     private val client: Ollama = Ollama.create(
-        baseUrl = config.baseUrl,
-        port = config.port,
-        protocol = config.protocol
+        ollamaConfig = OllamaConfig(
+            baseUrl = config.baseUrl,
+            port = config.port,
+            protocol = config.protocol
+        )
     )
 ) : OpenAIProvider {
     /**
@@ -75,9 +77,11 @@ fun OpenAIProvider.Companion.ollama(
         OpenAIModel(it.value)
     },
     client: Ollama = Ollama.create(
-        baseUrl = config.baseUrl,
-        port = config.port,
-        protocol = config.protocol
+        ollamaConfig = OllamaConfig(
+            baseUrl = config.baseUrl,
+            port = config.port,
+            protocol = config.protocol
+        )
     )
 ): OpenAIProvider {
     return OllamaOpenAIProvider(config = config, models = models, client = client)
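
Reviewer note (not part of the patch): a minimal usage sketch of the new Ollama.create entry point, assuming OllamaConfig takes the same lambda-style parameters (baseUrl, port, protocol) that this diff passes to it and that OllamaApi reads back via config.baseUrl(), config.port(), and config.protocol(); the OllamaConfig constructor itself is outside this change set.

    // Hypothetical wiring example; the values passed here reuse the companion
    // constants declared in Ollama.kt above.
    val config = OllamaConfig(
        baseUrl = { Ollama.BASE_URL },
        port = { Ollama.PORT },
        protocol = { Ollama.PROTOCOL },
    )
    val ollama: Ollama = Ollama.create(ollamaConfig = config)
    // `ollama` satisfies OllamaChat and OllamaGenerate through the delegation
    // set up in OllamaApi, so chat and generate calls go through the shared requester.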