Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

.Net: ONNX 0.5.0 Add OgaHandle resource management to Service [WIP] #9644

Draft
wants to merge 5 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ public async Task StreamTextFromChatAsync()
}
}

private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService)
private async Task StartStreamingChatAsync(OnnxRuntimeGenAIChatCompletionService chatCompletionService)
{
Console.WriteLine("Chat content:");
Console.WriteLine("------------------------");
Expand All @@ -158,7 +158,7 @@ private async Task StartStreamingChatAsync(IChatCompletionService chatCompletion
await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);
}

private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole)
private async Task StreamMessageOutputAsync(OnnxRuntimeGenAIChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole)
{
bool roleWritten = false;
string fullMessage = string.Empty;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;
Expand All @@ -18,13 +17,11 @@ namespace Microsoft.SemanticKernel.Connectors.Onnx;
/// <summary>
/// Represents a chat completion service using OnnxRuntimeGenAI.
/// </summary>
public sealed class OnnxRuntimeGenAIChatCompletionService : IChatCompletionService, IDisposable
public sealed class OnnxRuntimeGenAIChatCompletionService : IChatCompletionService
{
private readonly string _modelId;
private readonly string _modelPath;
private readonly JsonSerializerOptions? _jsonSerializerOptions;
private Model? _model;
private Tokenizer? _tokenizer;

private Dictionary<string, object?> AttributesInternal { get; } = new();

Expand Down Expand Up @@ -90,13 +87,17 @@ private async IAsyncEnumerable<string> RunInferenceAsync(ChatHistory chatHistory
OnnxRuntimeGenAIPromptExecutionSettings onnxPromptExecutionSettings = this.GetOnnxPromptExecutionSettingsSettings(executionSettings);

var prompt = this.GetPrompt(chatHistory, onnxPromptExecutionSettings);
var tokens = this.GetTokenizer().Encode(prompt);

using var generatorParams = new GeneratorParams(this.GetModel());
using var ogaHandle = new OgaHandle();
using var model = new Model(this._modelPath);
using var tokenizer = new Tokenizer(model);

var tokens = tokenizer.Encode(prompt);

using var generatorParams = new GeneratorParams(model);
this.UpdateGeneratorParamsFromPromptExecutionSettings(generatorParams, onnxPromptExecutionSettings);
generatorParams.SetInputSequences(tokens);

using var generator = new Generator(this.GetModel(), generatorParams);
using var generator = new Generator(model, generatorParams);

bool removeNextTokenStartingWithSpace = true;
while (!generator.IsDone())
Expand All @@ -110,7 +111,7 @@ private async IAsyncEnumerable<string> RunInferenceAsync(ChatHistory chatHistory

var outputTokens = generator.GetSequence(0);
var newToken = outputTokens.Slice(outputTokens.Length - 1, 1);
string output = this.GetTokenizer().Decode(newToken);
string output = tokenizer.Decode(newToken);

if (removeNextTokenStartingWithSpace && output[0] == ' ')
{
Expand All @@ -123,10 +124,6 @@ private async IAsyncEnumerable<string> RunInferenceAsync(ChatHistory chatHistory
}
}

private Model GetModel() => this._model ??= new Model(this._modelPath);

private Tokenizer GetTokenizer() => this._tokenizer ??= new Tokenizer(this.GetModel());

private string GetPrompt(ChatHistory chatHistory, OnnxRuntimeGenAIPromptExecutionSettings onnxRuntimeGenAIPromptExecutionSettings)
{
var promptBuilder = new StringBuilder();
Expand Down Expand Up @@ -206,11 +203,4 @@ private OnnxRuntimeGenAIPromptExecutionSettings GetOnnxPromptExecutionSettingsSe

return OnnxRuntimeGenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
}

/// <inheritdoc/>
public void Dispose()
{
this._tokenizer?.Dispose();
this._model?.Dispose();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ public async Task ItCanUseKernelInvokeStreamingAsyncAsync()
[Fact(Skip = "For manual verification only")]
public async Task ItCanUseServiceGetStreamingChatMessageContentsAsync()
{
using var chat = CreateService();
var chat = CreateService();

ChatHistory history = [];
history.AddUserMessage("Where is the most famous fish market in Seattle, Washington, USA?");
Expand All @@ -76,7 +76,7 @@ public async Task ItCanUseServiceGetStreamingChatMessageContentsAsync()
[Fact(Skip = "For manual verification only")]
public async Task ItCanUseServiceGetChatMessageContentsAsync()
{
using var chat = CreateService();
var chat = CreateService();

ChatHistory history = [];
history.AddUserMessage("Where is the most famous fish market in Seattle, Washington, USA?");
Expand Down
Loading