Skip to content

Commit

Permalink
Merge branch 'main' into issues/9711-ollama-jsonelement-fc
Browse files Browse the repository at this point in the history
  • Loading branch information
RogerBarreto authored Nov 21, 2024
2 parents 8d5db3f + d8acb75 commit 69d19d9
Show file tree
Hide file tree
Showing 67 changed files with 1,819 additions and 606 deletions.
14 changes: 7 additions & 7 deletions dotnet/Directory.Packages.props
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
<PackageVersion Include="Microsoft.Extensions.Configuration.Abstractions" Version="8.0.0" />
<PackageVersion Include="Microsoft.ML.Tokenizers.Data.Cl100kBase" Version="1.0.0" />
<PackageVersion Include="Microsoft.IdentityModel.JsonWebTokens" Version="6.34.0" />
<PackageVersion Include="Microsoft.VisualStudio.Threading" Version="17.11.20" />
<PackageVersion Include="Microsoft.VisualStudio.Threading" Version="17.12.19" />
<PackageVersion Include="MSTest.TestFramework" Version="3.6.1" />
<PackageVersion Include="Newtonsoft.Json" Version="13.0.3" />
<PackageVersion Include="Npgsql" Version="8.0.5" />
Expand Down Expand Up @@ -62,9 +62,9 @@
<PackageVersion Include="Microsoft.DeepDev.TokenizerLib" Version="1.3.3" />
<PackageVersion Include="SharpToken" Version="2.0.3" />
<!-- Microsoft.Extensions.* -->
<PackageVersion Include="Microsoft.Extensions.AI" Version="9.0.0-preview.9.24525.1" />
<PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="9.0.0-preview.9.24525.1" />
<PackageVersion Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.0.0-preview.9.24525.1" />
<PackageVersion Include="Microsoft.Extensions.AI" Version="9.0.0-preview.9.24556.5" />
<PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="9.0.0-preview.9.24556.5" />
<PackageVersion Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.0.0-preview.9.24556.5" />
<PackageVersion Include="Microsoft.Extensions.Configuration" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Configuration.Binder" Version="8.0.2" />
<PackageVersion Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="8.0.0" />
Expand Down Expand Up @@ -106,8 +106,8 @@
<PackageVersion Include="Microsoft.Identity.Client.Extensions.Msal" Version="[2.28.0, )" />
<PackageVersion Include="Microsoft.OpenApi" Version="1.6.22" />
<PackageVersion Include="Microsoft.OpenApi.Readers" Version="1.6.22" />
<PackageVersion Include="Microsoft.OpenApi.ApiManifest" Version="0.5.5-preview" />
<PackageVersion Include="Microsoft.Plugins.Manifest" Version="1.0.0-preview3" />
<PackageVersion Include="Microsoft.OpenApi.ApiManifest" Version="0.5.6-preview" />
<PackageVersion Include="Microsoft.Plugins.Manifest" Version="1.0.0-rc2" />
<PackageVersion Include="Google.Apis.CustomSearchAPI.v1" Version="[1.60.0.3001, )" />
<PackageVersion Include="Grpc.Net.Client" Version="2.66.0" />
<PackageVersion Include="protobuf-net" Version="3.2.45" />
Expand All @@ -130,7 +130,7 @@
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageVersion Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.11.20" />
<PackageVersion Include="Microsoft.VisualStudio.Threading.Analyzers" Version="17.12.19" />
<PackageReference Include="Microsoft.VisualStudio.Threading.Analyzers">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
Expand Down
57 changes: 38 additions & 19 deletions dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,17 @@ public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, F

logger.LogInformation("Function {FunctionName} succeeded.", context.Function.Name);

await this.LogFunctionResultAsync(context);
if (context.IsStreaming)
{
// Overriding the result in a streaming scenario enables the filter to stream chunks
// back to the operation's origin without interrupting the data flow.
var enumerable = context.Result.GetValue<IAsyncEnumerable<StreamingChatMessageContent>>();
context.Result = new FunctionResult(context.Result, ProcessFunctionResultStreamingAsync(enumerable!));
}
else
{
ProcessFunctionResult(context.Result);
}
}
catch (Exception exception)
{
Expand All @@ -167,34 +177,43 @@ public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, F
}
}

private async Task LogFunctionResultAsync(FunctionInvocationContext context)
/// <summary>
/// Logs the result and token usage of a non-streaming function invocation.
/// </summary>
/// <param name="functionResult">The result produced by the invoked function.</param>
private void ProcessFunctionResult(FunctionResult functionResult)
{
    string? result = functionResult.GetValue<string>();

    // Use TryGetValue rather than the indexer: the indexer throws
    // KeyNotFoundException when the connector did not populate "Usage" metadata,
    // whereas a missing entry should simply mean "nothing to log".
    object? usage = null;
    functionResult.Metadata?.TryGetValue("Usage", out usage);

    if (!string.IsNullOrWhiteSpace(result))
    {
        logger.LogTrace("Function result: {Result}", result);
    }

    // IsEnabled guard avoids paying for JSON serialization when
    // Information-level logging is turned off.
    if (logger.IsEnabled(LogLevel.Information) && usage is not null)
    {
        logger.LogInformation("Usage: {Usage}", JsonSerializer.Serialize(usage));
    }
}

private async IAsyncEnumerable<StreamingChatMessageContent> ProcessFunctionResultStreamingAsync(IAsyncEnumerable<StreamingChatMessageContent> data)
{
string? result = null;
object? usage = null;

if (context.IsStreaming)
var stringBuilder = new StringBuilder();

await foreach (var item in data)
{
var stringBuilder = new StringBuilder();
yield return item;

await foreach (var item in context.Result.GetValue<IAsyncEnumerable<StreamingChatMessageContent>>()!)
if (item.Content is not null)
{
if (item.Content is not null)
{
stringBuilder.Append(item.Content);
}

usage = item.Metadata?["Usage"];
stringBuilder.Append(item.Content);
}

result = stringBuilder.ToString();
}
else
{
result = context.Result.GetValue<string>();
usage = context.Result.Metadata?["Usage"];
usage = item.Metadata?["Usage"];
}

if (result is not null)
var result = stringBuilder.ToString();

if (!string.IsNullOrWhiteSpace(result))
{
logger.LogTrace("Function result: {Result}", result);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,41 @@ public async Task FiltersAreExecutedCorrectlyAsync()
Assert.Equal("Test chat response", result.ToString());
}

[Fact]
public async Task FunctionSequenceIndexIsCorrectForConcurrentCallsAsync()
{
    // Arrange: record the sequence index the filter observes for each function invocation.
    var observedSequenceIndexes = new List<int>();
    var expected = new List<int> { 0, 1, 0, 1 };

    var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1");
    var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2");
    var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);

    var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
    {
        observedSequenceIndexes.Add(context.FunctionSequenceIndex);
        await next(context);
    });

    this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();

    // Act: the stubbed responses drive two rounds of two parallel, concurrently invoked calls.
    var executionSettings = new OpenAIPromptExecutionSettings
    {
        FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new()
        {
            AllowParallelCalls = true,
            AllowConcurrentInvocation = true
        })
    };

    var result = await kernel.InvokePromptAsync("Test prompt", new(executionSettings));

    // Assert: the index restarts at 0 for each round of concurrent invocations.
    Assert.Equal(expected, observedSequenceIndexes);
}

[Fact]
public async Task FiltersAreExecutedCorrectlyOnStreamingAsync()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,11 @@ public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessa
{
options.TopK = topK;
}
else if (entry.Key.Equals("seed", StringComparison.OrdinalIgnoreCase) &&
TryConvert(entry.Value, out long seed))
{
options.Seed = seed;
}
else if (entry.Key.Equals("max_tokens", StringComparison.OrdinalIgnoreCase) &&
TryConvert(entry.Value, out int maxTokens))
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,11 @@ public void Dispose()
settings.ExtensionData["top_k"] = options.TopK.Value;
}

if (options.Seed is not null)
{
settings.ExtensionData["seed"] = options.Seed.Value;
}

if (options.ResponseFormat is not null)
{
if (options.ResponseFormat is ChatResponseFormatText)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Diagnostics.CodeAnalysis;
using System.Threading;
using Microsoft.SemanticKernel.ChatCompletion;

Expand All @@ -9,7 +8,6 @@ namespace Microsoft.SemanticKernel;
/// <summary>
/// Class with data related to automatic function invocation.
/// </summary>
[Experimental("SKEXP0001")]
public class AutoFunctionInvocationContext
{
/// <summary>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Diagnostics.CodeAnalysis;
using System.Threading.Tasks;

namespace Microsoft.SemanticKernel;
Expand All @@ -11,7 +10,6 @@ namespace Microsoft.SemanticKernel;
/// <summary>
/// Interface for filtering actions during automatic function invocation.
/// </summary>
[Experimental("SKEXP0001")]
public interface IAutoFunctionInvocationFilter
{
/// <summary>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Diagnostics.CodeAnalysis;
using System.Threading;

namespace Microsoft.SemanticKernel;
Expand Down Expand Up @@ -38,7 +37,6 @@ internal FunctionInvocationContext(Kernel kernel, KernelFunction function, Kerne
/// <summary>
/// Boolean flag which indicates whether a filter is invoked within streaming or non-streaming mode.
/// </summary>
[Experimental("SKEXP0001")]
public bool IsStreaming { get; init; }

/// <summary>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Diagnostics.CodeAnalysis;
using System.Threading;

namespace Microsoft.SemanticKernel;

/// <summary>
Expand Down Expand Up @@ -37,7 +37,6 @@ internal PromptRenderContext(Kernel kernel, KernelFunction function, KernelArgum
/// <summary>
/// Boolean flag which indicates whether a filter is invoked within streaming or non-streaming mode.
/// </summary>
[Experimental("SKEXP0001")]
public bool IsStreaming { get; init; }

/// <summary>
Expand Down
1 change: 0 additions & 1 deletion dotnet/src/SemanticKernel.Abstractions/Kernel.cs
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,6 @@ public Kernel Clone() =>
/// <summary>
/// Gets the collection of auto function invocation filters available through the kernel.
/// </summary>
[Experimental("SKEXP0001")]
public IList<IAutoFunctionInvocationFilter> AutoFunctionInvocationFilters =>
this._autoFunctionInvocationFilters ??
Interlocked.CompareExchange(ref this._autoFunctionInvocationFilters, [], null) ??
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -526,7 +526,7 @@ public async Task AsChatCompletionServiceNonStreamingContentConvertedAsExpected(
Assert.Equal(0.5f, actualOptions.FrequencyPenalty);
Assert.Equal(0.75f, actualOptions.TopP);
Assert.Equal(["hello"], actualOptions.StopSequences);
Assert.Equal(42, actualOptions.AdditionalProperties?.TryGetValue("Seed", out int seed) is true ? seed : 0);
Assert.Equal(42, actualOptions.Seed);
Assert.Equal("user123", actualOptions.AdditionalProperties?["User"]);
}

Expand Down Expand Up @@ -621,7 +621,7 @@ public async Task AsChatCompletionServiceStreamingContentConvertedAsExpected()
Assert.Equal(0.5f, actualOptions.FrequencyPenalty);
Assert.Equal(0.75f, actualOptions.TopP);
Assert.Equal(["hello"], actualOptions.StopSequences);
Assert.Equal(42, actualOptions.AdditionalProperties?.TryGetValue("Seed", out int seed) is true ? seed : 0);
Assert.Equal(42, actualOptions.Seed);
Assert.Equal("user123", actualOptions.AdditionalProperties?["User"]);
}

Expand Down
3 changes: 1 addition & 2 deletions python/samples/concepts/chat_completion/chat_bedrock_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,7 @@
import asyncio

from semantic_kernel import Kernel
from semantic_kernel.connectors.ai.bedrock.bedrock_prompt_execution_settings import BedrockChatPromptExecutionSettings
from semantic_kernel.connectors.ai.bedrock.services.bedrock_chat_completion import BedrockChatCompletion
from semantic_kernel.connectors.ai.bedrock import BedrockChatCompletion, BedrockChatPromptExecutionSettings
from semantic_kernel.contents import ChatHistory

system_message = """
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from openai import AsyncOpenAI

from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion
from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion
from semantic_kernel.contents.chat_history import ChatHistory
from semantic_kernel.functions.kernel_arguments import KernelArguments
from semantic_kernel.kernel import Kernel
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from openai import AsyncOpenAI

from semantic_kernel.connectors.ai.open_ai.services.open_ai_text_embedding import OpenAITextEmbedding
from semantic_kernel.connectors.ai.open_ai import OpenAITextEmbedding
from semantic_kernel.core_plugins.text_memory_plugin import TextMemoryPlugin
from semantic_kernel.kernel import Kernel
from semantic_kernel.memory.semantic_text_memory import SemanticTextMemory
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from openai import AsyncOpenAI

from semantic_kernel.connectors.ai.open_ai.services.open_ai_chat_completion import OpenAIChatCompletion
from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion
from semantic_kernel.contents.chat_history import ChatHistory
from semantic_kernel.functions.kernel_arguments import KernelArguments
from semantic_kernel.kernel import Kernel
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,7 @@
formatted_question,
formatted_system_message,
)
from semantic_kernel.connectors.ai.azure_ai_inference.services.azure_ai_inference_chat_completion import (
AzureAIInferenceChatCompletion,
)
from semantic_kernel.connectors.ai.azure_ai_inference import AzureAIInferenceChatCompletion
from semantic_kernel.connectors.ai.chat_completion_client_base import ChatCompletionClientBase
from semantic_kernel.contents.chat_history import ChatHistory
from semantic_kernel.functions.kernel_arguments import KernelArguments
Expand Down
Loading

0 comments on commit 69d19d9

Please sign in to comment.