diff --git a/.github/_typos.toml b/.github/_typos.toml
index 37bf426d04b6..917745e1ae83 100644
--- a/.github/_typos.toml
+++ b/.github/_typos.toml
@@ -15,6 +15,7 @@ extend-exclude = [
"CodeTokenizerTests.cs",
"test_code_tokenizer.py",
"*response.json",
+ "test_content.txt",
]
[default.extend-words]
diff --git a/docs/decisions/0046-azure-model-as-a-service.md b/docs/decisions/0046-azure-model-as-a-service.md
new file mode 100644
index 000000000000..a91468e253b0
--- /dev/null
+++ b/docs/decisions/0046-azure-model-as-a-service.md
@@ -0,0 +1,44 @@
+---
+# These are optional elements. Feel free to remove any of them.
+status: { accepted }
+contact: { rogerbarreto, taochen }
+date: { 2024-06-20 }
+deciders: { alliscode, moonbox3, eavanvalkenburg }
+consulted: {}
+informed: {}
+---
+
+# Support for Azure Model-as-a-Service in SK
+
+## Context and Problem Statement
+
+There has been a demand from customers for the implementation of Model-as-a-Service (MaaS) in SK. MaaS, which is also referred to as [serverless API](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/model-catalog-overview#model-deployment-managed-compute-and-serverless-api-pay-as-you-go), is available in [Azure AI Studio](https://learn.microsoft.com/en-us/azure/ai-studio/what-is-ai-studio). This mode of consumption operates on a pay-as-you-go basis, typically using tokens for billing purposes. Clients can access the service via the [Azure AI Model Inference API](https://learn.microsoft.com/en-us/azure/ai-studio/reference/reference-model-inference-api?tabs=azure-studio) or client SDKs.
+
+At present, there is no official support for MaaS in SK. The purpose of this ADR is to examine the constraints of the service and explore potential solutions to enable support for the service in SK via the development of a new AI connector.
+
+## Client SDK
+
+The Azure team will be providing a new client library, namely `Azure.AI.Inference` in .Net and `azure-ai-inference` in Python, for effectively interacting with the service. While the service API is OpenAI-compatible, it is not permissible to use the OpenAI and the Azure OpenAI client libraries for interacting with the service as they are not independent with respect to both the models and their providers. This is because Azure AI Studio features a diverse range of open-source models, other than OpenAI models.
+
+### Limitations
+
+The initial release of the client SDK will only support chat completion and text/image embedding generation, with image generation to be added later.
+
+Plans to support text completion are currently unclear, and it is highly unlikely that the SDK will ever include support for text completion. As a result, the new AI connector will **NOT** support text completions in the initial version until we get more customer signals or the client SDK adds support.
+
+## AI Connector
+
+### Naming options
+
+- Azure
+- AzureAI
+- AzureAIInference
+- AzureAIModelInference
+
+ Decision: `AzureAIInference`
+
+### Support for model-specific parameters
+
+Models can possess supplementary parameters that are not part of the default API. The service API and the client SDK enable the provision of model-specific parameters. Users can provide model-specific settings via a dedicated argument along with other settings, such as `temperature` and `top_p`, among others.
+
+In the context of SK, execution parameters are categorized under `PromptExecutionSettings`, which is inherited by all connector-specific setting classes. The settings of the new connector will contain a member of type `dictionary`, which will group together the model-specific parameters.
diff --git a/docs/decisions/0045-kernel-content-graduation.md b/docs/decisions/0046-kernel-content-graduation.md
similarity index 100%
rename from docs/decisions/0045-kernel-content-graduation.md
rename to docs/decisions/0046-kernel-content-graduation.md
diff --git a/docs/decisions/0047-azure-open-ai-connectors.md b/docs/decisions/0047-azure-open-ai-connectors.md
new file mode 100644
index 000000000000..c909574b9563
--- /dev/null
+++ b/docs/decisions/0047-azure-open-ai-connectors.md
@@ -0,0 +1,210 @@
+---
+# These are optional elements. Feel free to remove any of them.
+status: approved
+contact: rogerbarreto
+date: 2024-06-24
+deciders: rogerbarreto, matthewbolanos, markwallace-microsoft, sergeymenshykh
+consulted: stephentoub, dmytrostruk
+---
+
+# OpenAI and Azure Connectors Naming and Structuring
+
+## Context and Problem Statement
+
+It has recently been announced that OpenAI and Azure will each have their own dedicated SDKs for accessing their services. Previously, there was no official SDK for OpenAI, and our OpenAI Connector relied solely on the Azure SDK client for access.
+
+With the introduction of the official OpenAI SDK, we now have access to more up-to-date features provided by OpenAI, making it advantageous to use this SDK instead of the Azure SDK.
+
+Additionally, it has become clear that we need to separate the OpenAI connector into two distinct targets: one for OpenAI and another for Azure OpenAI. This separation will enhance code clarity and facilitate a better understanding of the usage of each target.
+
+## Decision Drivers
+
+- Update our connectors to use latest versions of OpenAI and Azure SDKs.
+- Minimize or eliminate any breaking changes for developers currently using the existing OpenAI connector.
+- Changes made should be future proof.
+
+## Versioning
+
+Although the current `Azure.AI.OpenAI` and `OpenAI` SDK packages have their major versions updated (2.0.0), that change does not represent a `SemanticKernel` major breaking change. All of the alternative options provided below take into consideration that the new updated version of `SemanticKernel.Connectors.OpenAI` and `SemanticKernel.Connectors.AzureOpenAI` will be a minor version bump `1.N+1.0` for all SemanticKernel packages.
+
+### Meta Package Strategy
+
+Currently the `Microsoft.SemanticKernel` package is a meta package that includes both `SemanticKernel.Core` and `SemanticKernel.Connectors.OpenAI`, with the new changes a new project will be added to the meta package `SemanticKernel.Connectors.AzureOpenAI` that will include the new Azure OpenAI connector.
+
+## Documentation (Upgrade Path)
+
+Documentation guidance and samples/examples will be created to guide on how to upgrade from the current OpenAI connector to the new one when needed.
+
+## OpenAI SDK limitations
+
+The new OpenAI SDK introduces some limitations that need to be considered and potentially can introduce breaking changes if not remediated by our internal implementation.
+
+- #### ⚠️ No support for multiple results (Choices) per request.
+
+ **Remediation**: Internally make the multiple requests and combine them.
+ **No remediation**: Breaking change removing `ResultsPerPrompt` from `OpenAIPromptExecutionSettings`.
+
+- #### ⚠️ Text Generation modality is not supported.
+
+ **Remediation**: Internally provide a HttpClient to be used against `gpt-3.5-turbo-instruct` for text generation modality. Same way was done for `TextToImage`, `AudioToText` service modalities.
+ **No remediation**: Breaking change removing any specific `TextGeneration` service implementations, this change doesn't impact `ChatCompletion` services that may still be used as `ITextGenerationService` implementations.
+
+## Improvements
+
+This also represents an opportunity to improve the current OpenAI connector by introducing the `Configuration` pattern to allow more flexibility and control over the services and their configurations.
+
+```csharp
+// Before
+builder.AddAzureOpenAIChatCompletion(deploymentName, endpoint, apiKey, httpClient);
+// After
+builder.AddAzureOpenAIChatCompletion(new
+{
+ DeploymentName = modelId;
+ Endpoint = endpoint;
+ ApiKey = apiKey;
+});
+```
+
+```csharp
+// Before
+builder.AddAzureOpenAIChatCompletion(deploymentName, openAIClient, serviceId, modelId)
+// After
+builder.AddAzureOpenAIChatCompletion(new
+{
+ DeploymentName = deploymentName;
+ ServiceId = serviceId;
+ ModelId = modelId;
+}, openAIClient);
+```
+
+## Potential Dependency Conflicts
+
+Since `SemanticKernel.Connectors.AzureOpenAI` and `SemanticKernel.Connectors.OpenAI` share the same `OpenAI 2.0.0` dependency, if the version of `OpenAI 2.0.0` differs in each, that may create a conflict when both connector packages are used together in a project.
+
+If this happens:
+
+1. Before updating our OpenAI connector package we will get in touch with `Azure.AI.OpenAI` team to align on the ETAs for their update.
+
+2. Investigate if the most recent `OpenAI` package when used with a `Azure.AI.OpenAI` that initially was targeting an older version of `OpenAI` SDK will not cause any breaking changes or conflicts.
+
+3. If there are conflicts and their ETA is small we may keep the `OpenAI` dependency on our `SemanticKernel.Connectors.OpenAI` similar to Azure's for a short period of time, otherwise we will evaluate moving forward with the `OpenAI` dependency version upgrade.
+
+## Considered Options
+
+- Option 1 - Merge New and Legacy (Slow transition for independent connectors).
+- Option 2 - Independent Connectors from Start.
+- Option 3 - Keep OpenAI and Azure in the same connector (As is).
+
+## Option 1 - Merge New and Legacy (Slow transition for independent connectors).
+
+This is the least breaking approach where we keep the current legacy OpenAI and AzureOpenAI APIs temporarily in the connector using last Azure SDK `Azure.AI.OpenAI 1.0.0-beta.17` and add new OpenAI specific APIs using the new `OpenAI 2.0.0-beta.*` SDK package.
+
+This approach also implies that a new connector will be created on a second moment for Azure OpenAI services specifically fully dependent on the latest `Azure.AI.OpenAI 2.0.0-beta.*` SDK package.
+
+In a later stage we will deprecate all the OpenAI and Azure legacy APIs in the `SemanticKernel.Connectors.OpenAI` namespace and remove Azure SDK `Azure.AI.OpenAI 1.0.0-beta.17` and those APIs in a future release, making the OpenAI Connector fully dedicated for OpenAI services only depending on with the `OpenAI 2.0.0-beta.*` dependency.
+
+```mermaid
+graph TD
+ A[SemanticKernel.Connectors.OpenAI] --> B[OpenAI 2.0.0-beta.*]
+ A --> C[Azure.AI.OpenAI 1.0.0-beta.17]
+ D[SemanticKernel.Connectors.AzureOpenAI] --> E[Azure.AI.OpenAI 2.0.0-beta.*]
+```
+
+The new `Options` pattern will be used as an improvement as well as a measure to avoid breaking changes with the legacy APIs.
+
+Following this change the `SemanticKernel.Connectors.OpenAI` and a new `SemanticKernel.Connectors.AzureOpenAI` connector will be created for Azure specific services, using the new Azure SDK `Azure.AI.OpenAI 2.0.0-beta.*` with all new APIs using the options approach.
+
+### Phases of the transition
+
+- **Phase 1**: Add new OpenAI SDK APIs to the current OpenAI connector and keep the Azure OpenAI APIs using the last Azure SDK.
+- **Phase 2**:
+ - Create a new connector for Azure OpenAI services using the new Azure SDK
+ - Deprecate all Azure OpenAI APIs in the `OpenAI` connector pointing to new `AzureOpenAI` connector
+ - Remove Azure SDK dependency from the OpenAI connector.
+ - Add `AzureOpenAI` connector to the `Microsoft.SemanticKernel` meta package.
+- **Phase 3**: Deprecate all legacy `OpenAI APIs` in the `OpenAI` connector pointing to new `Options` APIs.
+- **Phase 4**: Remove all legacy APIs from the OpenAI connector.
+
+### Impact
+
+Pros:
+
+- Minimal breaking changes for developers using the current OpenAI connector.
+- Clear separation of concerns between OpenAI and Azure OpenAI connectors.
+
+Cons:
+
+- Since `SemanticKernel.Connectors.AzureOpenAI` and `SemanticKernel.Connectors.OpenAI` share the same dependency of different versions, both packages cannot be used in the same project and a strategy will be needed when deploying both connectors.
+- Added dependency for both `Azure OpenAI 1.0-beta17` and `OpenAI 2.0-beta1`.
+
+### Dependency Management Strategies
+
+1. Use only one of the connectors in the same project; some modifications will be needed to accommodate `Concepts` and other projects that share OpenAI and AzureOpenAI examples.
+2. Hold AzureOpenAI connector implementation until we are ready to break (exclude) all Azure APIs in OpenAI connector.
+3. Deploy a new project with a new namespace for `Azure.AI.OpenAI.Legacy 1.0.0-beta.17` and update our `SemanticKernel.Connectors.OpenAI` to use this new namespace to avoid version clashing on the `Azure.AI.OpenAI` namespace.
+
+## Option 2 - Independent Connectors from Start.
+
+This option is focused on creating fully independent connectors for OpenAI and Azure OpenAI services since the start with all breaking changes needed to achieve that.
+
+```mermaid
+graph TD
+ D[SemanticKernel.Connectors.AzureOpenAI] --> E[Azure.AI.OpenAI 2.0.0-beta.*]
+ E --> B[OpenAI 2.0.0-beta.*]
+ A[SemanticKernel.Connectors.OpenAI] --> B[OpenAI 2.0.0-beta.*]
+```
+
+Impact:
+
+- All `Azure` related logic will be removed from `SemanticKernel.Connectors.OpenAI` to avoid any clashes with same names introduced in the new `SemanticKernel.Connectors.AzureOpenAI` as well as sending a congruent message to developers that the OpenAI connector is focused on OpenAI services only moving forward.
+
+### Impact
+
+Pros:
+
+- Clear separation of concerns between OpenAI and Azure OpenAI connectors.
+- Small breaking changes for developers focused on OpenAI specific APIs.
+- Faster transition to the new OpenAI SDK and Azure OpenAI SDK.
+
+Cons:
+
+- Large breaking changes for developers using the current OpenAI connector for Azure.
+- [Potential Dependency Conflicts](#potential-dependency-conflicts) may arise if the `Azure.AI.OpenAI` team does not update their package.
+
+## Option 3 - Keep OpenAI and Azure in the same connector (As is).
+
+This option is fully focused on the least impact possible, combining both Azure and OpenAI SDK dependencies in one single connector following the same approach as the current connector.
+
+Changes:
+
+1. Update all current OpenAI specific services and client to use new OpenAI SDK
+2. Update Azure specific services and client to use the latest Azure OpenAI SDK.
+3. Optionally add `Options` pattern new APIs to the connector services and deprecate old ones.
+
+### Impact
+
+Pros:
+
+- Minimal breaking changes for developers using the current OpenAI connector.
+- The breaking changes will be limited on how we tackle the points mentioned in the [OpenAI SDK limitations](#openai-sdk-limitations) above.
+- Will not have a dependency conflict between `Azure.AI.OpenAI` and `OpenAI` SDKs.
+
+Cons:
+
+- We will be limited on the OpenAI SDK version that is used by the latest `Azure.AI.OpenAI` package, which may not be the latest version available.
+- When using direct Azure or OpenAI specific services developers don't expect to see other provider specific services in their pool of options and dependencies.
+
+## Decision Outcome
+
+### Option 2 - Independent Connectors from Start.
+
+This option is the fastest approach for transitioning to a potential 1.0 general availability of the `OpenAI` SDK.
+
+This option also provides a clear separation of concerns between OpenAI and Azure OpenAI connectors from the start.
+
+Prevents any confusion sending a clear message on our intentions on splitting `OpenAI` and `AzureOpenAI` components away.
+
+#### OpenAI SDK limitations:
+
+- [Multiple results](#openai-sdk-limitations): **Do not remediate**.
+- [Text Generation modality is not supported](#openai-sdk-limitations): **Do not remediate**.
diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index 146311afca6f..bb4233ad6ba9 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -9,9 +9,9 @@
-
+
-
+
@@ -20,7 +20,7 @@
-
+
@@ -53,14 +53,14 @@
-
+
-
+
@@ -73,8 +73,8 @@
-
-
+
+
@@ -85,7 +85,7 @@
-
+
@@ -97,7 +97,7 @@
-
+ allruntime; build; native; contentfiles; analyzers; buildtransitive
diff --git a/dotnet/nuget/nuget-package.props b/dotnet/nuget/nuget-package.props
index 6a48e76f58fc..d91b4c61c640 100644
--- a/dotnet/nuget/nuget-package.props
+++ b/dotnet/nuget/nuget-package.props
@@ -1,7 +1,7 @@
- 1.15.0
+ 1.15.1$(VersionPrefix)-$(VersionSuffix)$(VersionPrefix)
diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs
new file mode 100644
index 000000000000..ee6fb9b38f2a
--- /dev/null
+++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs
@@ -0,0 +1,69 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Text;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace Agents;
+
+///
+/// Demonstrate creation of and
+/// eliciting its response to three explicit user messages.
+///
+public class ChatCompletion_Streaming(ITestOutputHelper output) : BaseTest(output)
+{
+ private const string ParrotName = "Parrot";
+ private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound.";
+
+ [Fact]
+ public async Task UseStreamingChatCompletionAgentAsync()
+ {
+ // Define the agent
+ ChatCompletionAgent agent =
+ new()
+ {
+ Name = ParrotName,
+ Instructions = ParrotInstructions,
+ Kernel = this.CreateKernelWithChatCompletion(),
+ };
+
+ ChatHistory chat = [];
+
+ // Respond to user input
+ await InvokeAgentAsync("Fortune favors the bold.");
+ await InvokeAgentAsync("I came, I saw, I conquered.");
+ await InvokeAgentAsync("Practice makes perfect.");
+
+ // Local function to invoke agent and display the conversation messages.
+ async Task InvokeAgentAsync(string input)
+ {
+ chat.Add(new ChatMessageContent(AuthorRole.User, input));
+
+ Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+
+ StringBuilder builder = new();
+ await foreach (StreamingChatMessageContent message in agent.InvokeStreamingAsync(chat))
+ {
+ if (string.IsNullOrEmpty(message.Content))
+ {
+ continue;
+ }
+
+ if (builder.Length == 0)
+ {
+ Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}:");
+ }
+
+ Console.WriteLine($"\t > streamed: '{message.Content}'");
+ builder.Append(message.Content);
+ }
+
+ if (builder.Length > 0)
+ {
+ // Display full response and capture in chat history
+ Console.WriteLine($"\t > complete: '{builder}'");
+ chat.Add(new ChatMessageContent(AuthorRole.Assistant, builder.ToString()) { AuthorName = agent.Name });
+ }
+ }
+ }
+}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs
index ac3c70a750b3..7537f53da726 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs
@@ -1,5 +1,4 @@
// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.OpenAI.Assistants;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Resources;
@@ -7,7 +6,7 @@
namespace Agents;
///
-/// Demonstrate uploading and retrieving files with .
+/// Demonstrate using .
///
public class OpenAIAssistant_FileService(ITestOutputHelper output) : BaseTest(output)
{
@@ -19,7 +18,6 @@ public class OpenAIAssistant_FileService(ITestOutputHelper output) : BaseTest(ou
[Fact]
public async Task UploadAndRetrieveFilesAsync()
{
- var openAIClient = new AssistantsClient(TestConfiguration.OpenAI.ApiKey);
OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
BinaryContent[] files = [
@@ -29,41 +27,40 @@ public async Task UploadAndRetrieveFilesAsync()
new BinaryContent(data: await EmbeddedResource.ReadAllAsync("travelinfo.txt"), mimeType: "text/plain") { InnerContent = "travelinfo.txt" }
];
- var fileIds = new Dictionary();
- foreach (var file in files)
+ var fileContents = new Dictionary();
+ foreach (BinaryContent file in files)
{
- var result = await openAIClient.UploadFileAsync(new BinaryData(file.Data), Azure.AI.OpenAI.Assistants.OpenAIFilePurpose.FineTune);
- fileIds.Add(result.Value.Id, file);
+ OpenAIFileReference result = await fileService.UploadContentAsync(file, new(file.InnerContent!.ToString()!, OpenAIFilePurpose.FineTune));
+ fileContents.Add(result.Id, file);
}
- foreach (var file in (await openAIClient.GetFilesAsync(Azure.AI.OpenAI.Assistants.OpenAIFilePurpose.FineTune)).Value)
+ foreach (OpenAIFileReference fileReference in await fileService.GetFilesAsync(OpenAIFilePurpose.FineTune))
{
- if (!fileIds.ContainsKey(file.Id))
+ // Only interested in the files we uploaded
+ if (!fileContents.ContainsKey(fileReference.Id))
{
continue;
}
- var data = (await openAIClient.GetFileContentAsync(file.Id)).Value;
+ BinaryContent content = await fileService.GetFileContentAsync(fileReference.Id);
- var mimeType = fileIds[file.Id].MimeType;
- var fileName = fileIds[file.Id].InnerContent!.ToString();
- var metadata = new Dictionary { ["id"] = file.Id };
- var uri = new Uri($"https://api.openai.com/v1/files/{file.Id}/content");
- var content = mimeType switch
+ string? mimeType = fileContents[fileReference.Id].MimeType;
+ string? fileName = fileContents[fileReference.Id].InnerContent!.ToString();
+ ReadOnlyMemory data = content.Data ?? new();
+
+ var typedContent = mimeType switch
{
- "image/jpeg" => new ImageContent(data, mimeType) { Uri = uri, InnerContent = fileName, Metadata = metadata },
- "audio/wav" => new AudioContent(data, mimeType) { Uri = uri, InnerContent = fileName, Metadata = metadata },
- _ => new BinaryContent(data, mimeType) { Uri = uri, InnerContent = fileName, Metadata = metadata }
+ "image/jpeg" => new ImageContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata },
+ "audio/wav" => new AudioContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata },
+ _ => new BinaryContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata }
};
- // Display the the file-name and mime-tyupe for each content type.
- Console.WriteLine($"File: {fileName} - {mimeType}");
-
- // Display the each content type-name.
- Console.WriteLine($"Type: {content}");
+ Console.WriteLine($"\nFile: {fileName} - {mimeType}");
+ Console.WriteLine($"Type: {typedContent}");
+ Console.WriteLine($"Uri: {typedContent.Uri}");
// Delete the test file remotely
- await openAIClient.DeleteFileAsync(file.Id);
+ await fileService.DeleteFileAsync(fileReference.Id);
}
}
}
diff --git a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletion.cs
index de2e996dc2fc..2e8f750e5476 100644
--- a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletion.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletion.cs
@@ -89,7 +89,7 @@ private async Task SimpleChatAsync(Kernel kernel)
{
Console.WriteLine("======== Simple Chat ========");
- var chatHistory = new ChatHistory();
+ var chatHistory = new ChatHistory("You are an expert in the tool shop.");
var chat = kernel.GetRequiredService();
// First user message
diff --git a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs
index 97f4873cfd52..803a6b6fafcd 100644
--- a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiChatCompletionStreaming.cs
@@ -90,7 +90,7 @@ private async Task StreamingChatAsync(Kernel kernel)
{
Console.WriteLine("======== Streaming Chat ========");
- var chatHistory = new ChatHistory();
+ var chatHistory = new ChatHistory("You are an expert in the tool shop.");
var chat = kernel.GetRequiredService();
// First user message
diff --git a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiVision.cs b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiVision.cs
index 1bf70ca28f5b..179b2b40937d 100644
--- a/dotnet/samples/Concepts/ChatCompletion/Google_GeminiVision.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/Google_GeminiVision.cs
@@ -14,7 +14,7 @@ public async Task GoogleAIAsync()
Console.WriteLine("============= Google AI - Gemini Chat Completion with vision =============");
string geminiApiKey = TestConfiguration.GoogleAI.ApiKey;
- string geminiModelId = "gemini-pro-vision";
+ string geminiModelId = TestConfiguration.GoogleAI.Gemini.ModelId;
if (geminiApiKey is null)
{
@@ -28,7 +28,7 @@ public async Task GoogleAIAsync()
apiKey: geminiApiKey)
.Build();
- var chatHistory = new ChatHistory();
+ var chatHistory = new ChatHistory("Your job is describing images.");
var chatCompletionService = kernel.GetRequiredService();
// Load the image from the resources
@@ -55,7 +55,7 @@ public async Task VertexAIAsync()
Console.WriteLine("============= Vertex AI - Gemini Chat Completion with vision =============");
string geminiBearerKey = TestConfiguration.VertexAI.BearerKey;
- string geminiModelId = "gemini-pro-vision";
+ string geminiModelId = TestConfiguration.VertexAI.Gemini.ModelId;
string geminiLocation = TestConfiguration.VertexAI.Location;
string geminiProject = TestConfiguration.VertexAI.ProjectId;
@@ -96,7 +96,7 @@ public async Task VertexAIAsync()
// location: TestConfiguration.VertexAI.Location,
// projectId: TestConfiguration.VertexAI.ProjectId);
- var chatHistory = new ChatHistory();
+ var chatHistory = new ChatHistory("Your job is describing images.");
var chatCompletionService = kernel.GetRequiredService();
// Load the image from the resources
diff --git a/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs b/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs
new file mode 100644
index 000000000000..a2edd8948e51
--- /dev/null
+++ b/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs
@@ -0,0 +1,207 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Diagnostics;
+using System.Text.Json;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+
+namespace Filtering;
+
+///
+/// Kernel and connectors have out-of-the-box telemetry to capture key information, which is available during requests.
+/// In most cases this telemetry should be enough to understand how the application behaves.
+/// This example contains the same telemetry recreated using Filters.
+/// This should allow to extend existing telemetry if needed with additional information and have the same set of logging messages for custom connectors.
+///
+public class TelemetryWithFilters(ITestOutputHelper output) : BaseTest(output)
+{
+ [Fact]
+ public async Task LoggingAsync()
+ {
+ // Initialize kernel with chat completion service.
+ var builder = Kernel
+ .CreateBuilder()
+ .AddOpenAIChatCompletion("gpt-4", TestConfiguration.OpenAI.ApiKey);
+
+ // Create and add logger, which will output messages to test detail summary window.
+ var logger = this.LoggerFactory.CreateLogger();
+ builder.Services.AddSingleton(logger);
+
+ // Add filters with logging.
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+
+ var kernel = builder.Build();
+
+ // Import sample functions.
+ kernel.ImportPluginFromFunctions("HelperFunctions",
+ [
+ kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."),
+ kernel.CreateFunctionFromMethod((string cityName) =>
+ cityName switch
+ {
+ "Boston" => "61 and rainy",
+ "London" => "55 and cloudy",
+ "Miami" => "80 and sunny",
+ "Paris" => "60 and rainy",
+ "Tokyo" => "50 and sunny",
+ "Sydney" => "75 and sunny",
+ "Tel Aviv" => "80 and sunny",
+ _ => "31 and snowing",
+ }, "GetWeatherForCity", "Gets the current weather for the specified city"),
+ ]);
+
+ // Enable automatic function calling.
+ var executionSettings = new OpenAIPromptExecutionSettings
+ {
+ ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions,
+ ModelId = "gpt-4"
+ };
+
+ // Define custom transaction ID to group set of operations related to the request.
+ var transactionId = new Guid("2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2");
+
+ // Note: logging scopes are available for out-of-the-box SK telemetry as well.
+ using (logger.BeginScope($"Transaction ID: [{transactionId}]"))
+ {
+ // Invoke prompt with arguments.
+ const string Prompt = "Given the current time of day and weather, what is the likely color of the sky in {{$city}}?";
+ var result = await kernel.InvokePromptAsync(Prompt, new(executionSettings) { ["city"] = "Boston" });
+
+ Console.WriteLine(result);
+ }
+
+ // Output:
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function InvokePromptAsync_Id invoking.
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function arguments: {"city":"Boston"}
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Execution settings: {"default":{"service_id":null,"model_id":"gpt-4"}}
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Rendered prompt: Given the current time of day and weather, what is the likely color of the sky in Boston?
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] ChatHistory: [{"Role":{"Label":"user"},...
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function count: 1
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function call requests: HelperFunctions-GetCurrentUtcTime({})
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function GetCurrentUtcTime invoking.
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function GetCurrentUtcTime succeeded.
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function result: Tue, 25 Jun 2024 15:30:16 GMT
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function completed. Duration: 0.0011554s
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] ChatHistory: [{"Role":{"Label":"user"},...
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function count: 1
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function call requests: HelperFunctions-GetWeatherForCity({"cityName":"Boston"})
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function GetWeatherForCity invoking.
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function arguments: {"cityName":"Boston"}
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function GetWeatherForCity succeeded.
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function result: 61 and rainy
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function completed. Duration: 0.0020878s
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function InvokePromptAsync_Id succeeded.
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function result: The sky in Boston would likely be gray due to the rain and current time of day.
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Usage: {"CompletionTokens":19,"PromptTokens":169,"TotalTokens":188}
+ // Transaction ID: [2d9ca2ce-8bf7-4d43-9f90-05eda7122aa2] Function completed. Duration: 5.397173s
+ }
+
+ ///
+ /// Filter which logs an information available during function invocation such as:
+ /// Function name, arguments, execution settings, result, duration, token usage.
+ ///
+ private sealed class FunctionInvocationLoggingFilter(ILogger logger) : IFunctionInvocationFilter
+ {
+ public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func next)
+ {
+ long startingTimestamp = Stopwatch.GetTimestamp();
+
+ logger.LogInformation("Function {FunctionName} invoking.", context.Function.Name);
+
+ if (context.Arguments.Count > 0)
+ {
+ logger.LogTrace("Function arguments: {Arguments}", JsonSerializer.Serialize(context.Arguments));
+ }
+
+ if (logger.IsEnabled(LogLevel.Information) && context.Arguments.ExecutionSettings is not null)
+ {
+ logger.LogInformation("Execution settings: {Settings}", JsonSerializer.Serialize(context.Arguments.ExecutionSettings));
+ }
+
+ try
+ {
+ await next(context);
+
+ logger.LogInformation("Function {FunctionName} succeeded.", context.Function.Name);
+ logger.LogTrace("Function result: {Result}", context.Result.ToString());
+
+ if (logger.IsEnabled(LogLevel.Information))
+ {
+ var usage = context.Result.Metadata?["Usage"];
+
+ if (usage is not null)
+ {
+ logger.LogInformation("Usage: {Usage}", JsonSerializer.Serialize(usage));
+ }
+ }
+ }
+ catch (Exception exception)
+ {
+ logger.LogError(exception, "Function failed. Error: {Message}", exception.Message);
+ throw;
+ }
+ finally
+ {
+ if (logger.IsEnabled(LogLevel.Information))
+ {
+ TimeSpan duration = new((long)((Stopwatch.GetTimestamp() - startingTimestamp) * (10_000_000.0 / Stopwatch.Frequency)));
+
+ // Capturing the duration in seconds as per OpenTelemetry convention for instrument units:
+ // More information here: https://opentelemetry.io/docs/specs/semconv/general/metrics/#instrument-units
+ logger.LogInformation("Function completed. Duration: {Duration}s", duration.TotalSeconds);
+ }
+ }
+ }
+ }
+
+ ///
+ /// Filter which logs an information available during prompt rendering such as rendered prompt.
+ ///
+ private sealed class PromptRenderLoggingFilter(ILogger logger) : IPromptRenderFilter
+ {
+ public async Task OnPromptRenderAsync(PromptRenderContext context, Func next)
+ {
+ await next(context);
+
+ logger.LogTrace("Rendered prompt: {Prompt}", context.RenderedPrompt);
+ }
+ }
+
+ ///
+ /// Filter which logs an information available during automatic function calling such as:
+ /// Chat history, number of functions to call, which functions to call and their arguments.
+ ///
+ private sealed class AutoFunctionInvocationLoggingFilter(ILogger logger) : IAutoFunctionInvocationFilter
+ {
+ public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next)
+ {
+ if (logger.IsEnabled(LogLevel.Trace))
+ {
+ logger.LogTrace("ChatHistory: {ChatHistory}", JsonSerializer.Serialize(context.ChatHistory));
+ }
+
+ if (logger.IsEnabled(LogLevel.Debug))
+ {
+ logger.LogDebug("Function count: {FunctionCount}", context.FunctionCount);
+ }
+
+ var functionCalls = FunctionCallContent.GetFunctionCalls(context.ChatHistory.Last()).ToList();
+
+ if (logger.IsEnabled(LogLevel.Trace))
+ {
+ functionCalls.ForEach(functionCall
+ => logger.LogTrace(
+ "Function call requests: {PluginName}-{FunctionName}({Arguments})",
+ functionCall.PluginName,
+ functionCall.FunctionName,
+ JsonSerializer.Serialize(functionCall.Arguments)));
+ }
+
+ await next(context);
+ }
+ }
+}
diff --git a/dotnet/samples/Concepts/Functions/MethodFunctions_Yaml.cs b/dotnet/samples/Concepts/Functions/MethodFunctions_Yaml.cs
new file mode 100644
index 000000000000..d02bc2ff311c
--- /dev/null
+++ b/dotnet/samples/Concepts/Functions/MethodFunctions_Yaml.cs
@@ -0,0 +1,73 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Reflection;
+using Microsoft.SemanticKernel;
+
+namespace Functions;
+
+public class MethodFunctions_Yaml(ITestOutputHelper output) : BaseTest(output)
+{
+ private const string FunctionConfig = """
+ name: ValidateTaskId
+ description: Validate a task id.
+ input_variables:
+ - name: kernel
+ description: Kernel instance.
+ - name: taskId
+ description: Task identifier.
+ is_required: true
+ output_variable:
+ description: String indicating whether or not the task id is valid.
+ """;
+
+ ///
+ /// This example creates a plugin and uses a separate configuration file for the function metadata.
+ ///
+ ///
+ /// Some reasons you would want to do this:
+ /// 1. It's not possible to modify the existing code to add the KernelFunction attribute.
+ /// 2. You want to keep the function metadata separate from the function implementation.
+ ///
+ [Fact]
+ public async Task CreateFunctionFromMethodWithYamlConfigAsync()
+ {
+ var kernel = new Kernel();
+
+ var config = KernelFunctionYaml.ToPromptTemplateConfig(FunctionConfig);
+
+ var target = new ValidatorPlugin();
+ MethodInfo method = target.GetType().GetMethod(config.Name!)!;
+ var functions = new List();
+ var functionName = config.Name;
+ var description = config.Description;
+ var parameters = config.InputVariables;
+ functions.Add(KernelFunctionFactory.CreateFromMethod(method, target, new()
+ {
+ FunctionName = functionName,
+ Description = description,
+ Parameters = parameters.Select(p => new KernelParameterMetadata(p.Name) { Description = p.Description, IsRequired = p.IsRequired }).ToList(),
+ }));
+
+ var plugin = kernel.ImportPluginFromFunctions("ValidatorPlugin", functions);
+
+ var function = plugin["ValidateTaskId"];
+ var result = await kernel.InvokeAsync(function, new() { { "taskId", "1234" } });
+ Console.WriteLine(result.GetValue());
+
+ Console.WriteLine("Function Metadata:");
+ Console.WriteLine(function.Metadata.Description);
+ Console.WriteLine(function.Metadata.Parameters[0].Description);
+ Console.WriteLine(function.Metadata.Parameters[1].Description);
+ }
+
+ ///
+ /// Plugin example with no KernelFunction or Description attributes.
+ ///
+ private sealed class ValidatorPlugin
+ {
+ public string ValidateTaskId(Kernel kernel, string taskId)
+ {
+ return taskId.Equals("1234", StringComparison.Ordinal) ? "Valid task id" : "Invalid task id";
+ }
+ }
+}
diff --git a/dotnet/samples/Concepts/Optimization/PluginSelection.cs b/dotnet/samples/Concepts/Optimization/PluginSelection.cs
new file mode 100644
index 000000000000..70c55456e72d
--- /dev/null
+++ b/dotnet/samples/Concepts/Optimization/PluginSelection.cs
@@ -0,0 +1,414 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ComponentModel;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.Memory;
+
+namespace Optimization;
+
+///
+/// Single kernel instance may have multiple imported plugins/functions. It's possible to enable automatic function calling,
+/// so AI model will decide which functions to call for specific request.
+/// In case there are a lot of plugins/functions in the application, some of them (or all of them) need to be shared with the model.
+/// This example shows how to use different plugin/function selection strategies, to share with AI only those functions
+/// which are related to a specific request.
+/// This technique should decrease token usage, as fewer functions will be shared with AI.
+/// It also helps to handle the scenario with a general purpose chat experience for a large enterprise,
+/// where there are so many plugins, that it's impossible to share all of them with AI model in a single request.
+///
+public sealed class PluginSelection(ITestOutputHelper output) : BaseTest(output)
+{
+ ///
+ /// This method shows how to select best functions to share with AI using vector similarity search.
+ ///
+ [Fact]
+ public async Task UsingVectorSearchWithKernelAsync()
+ {
+ // Initialize kernel with chat completion and embedding generation services.
+ // It's possible to combine different models from different AI providers to achieve the lowest token usage.
+ var builder = Kernel
+ .CreateBuilder()
+ .AddOpenAIChatCompletion("gpt-4", TestConfiguration.OpenAI.ApiKey)
+ .AddOpenAITextEmbeddingGeneration("text-embedding-3-small", TestConfiguration.OpenAI.ApiKey);
+
+ // Add logging.
+ var logger = this.LoggerFactory.CreateLogger();
+ builder.Services.AddSingleton(logger);
+
+ // Add memory store to keep functions and search for the most relevant ones for specific request.
+ builder.Services.AddSingleton();
+
+ // Add helper components defined in this example.
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+
+ var kernel = builder.Build();
+
+ // Import plugins with different features.
+ kernel.ImportPluginFromType();
+ kernel.ImportPluginFromType();
+ kernel.ImportPluginFromType();
+ kernel.ImportPluginFromType();
+ kernel.ImportPluginFromType();
+
+ // Get registered plugin store to save information about plugins.
+ var pluginStore = kernel.GetRequiredService();
+
+ // Save information about kernel plugins in plugin store.
+ const string CollectionName = "functions";
+
+ await pluginStore.SaveAsync(CollectionName, kernel.Plugins);
+
+ // Enable automatic function calling by default.
+ var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Define kernel arguments with specific request.
+ var kernelArguments = new KernelArguments(executionSettings) { ["Request"] = "Provide latest headlines" };
+
+ // Invoke the request without plugin selection filter first for comparison purposes.
+ Console.WriteLine("Run without filter:");
+ var result = await kernel.InvokePromptAsync("{{$Request}}", kernelArguments);
+
+ Console.WriteLine(result);
+ Console.WriteLine(result.Metadata?["Usage"]?.AsJson()); // All functions were shared with AI. Total tokens: ~250
+
+ // Define plugin selection filter.
+ var filter = new PluginSelectionFilter(
+ functionProvider: kernel.GetRequiredService(),
+ logger: kernel.GetRequiredService(),
+ collectionName: CollectionName,
+ numberOfBestFunctions: 1);
+
+ // Add filter to kernel.
+ kernel.FunctionInvocationFilters.Add(filter);
+
+ // Invoke the request with plugin selection filter.
+ Console.WriteLine("\nRun with filter:");
+
+ // ToolCallBehavior.AutoInvokeKernelFunctions is used here as well as defined above.
+ // In case there will be related functions found for specific request, the ToolCallBehavior will be updated in filter to
+ // ToolCallBehavior.EnableFunctions(functions, autoInvoke: true) - this will allow to share only related set of functions with AI.
+ result = await kernel.InvokePromptAsync("{{$Request}}", kernelArguments);
+
+ Console.WriteLine(result);
+ Console.WriteLine(result.Metadata?["Usage"]?.AsJson()); // Just one function was shared with AI. Total tokens: ~150
+ }
+
+ [Fact]
+ public async Task UsingVectorSearchWithChatCompletionAsync()
+ {
+ // Initialize kernel with chat completion and embedding generation services.
+ // It's possible to combine different models from different AI providers to achieve the lowest token usage.
+ var builder = Kernel
+ .CreateBuilder()
+ .AddOpenAIChatCompletion("gpt-4", TestConfiguration.OpenAI.ApiKey)
+ .AddOpenAITextEmbeddingGeneration("text-embedding-3-small", TestConfiguration.OpenAI.ApiKey);
+
+ // Add logging.
+ var logger = this.LoggerFactory.CreateLogger();
+ builder.Services.AddSingleton(logger);
+
+ // Add memory store to keep functions and search for the most relevant ones for specific request.
+ builder.Services.AddSingleton();
+
+ // Add helper components defined in this example.
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+ builder.Services.AddSingleton();
+
+ var kernel = builder.Build();
+
+ // Import plugins with different features.
+ kernel.ImportPluginFromType();
+ kernel.ImportPluginFromType();
+ kernel.ImportPluginFromType();
+ kernel.ImportPluginFromType();
+ kernel.ImportPluginFromType();
+
+ // Get registered plugin store to save information about plugins.
+ var pluginStore = kernel.GetRequiredService();
+
+ // Store information about kernel plugins in plugin store.
+ const string CollectionName = "functions";
+
+ await pluginStore.SaveAsync(CollectionName, kernel.Plugins);
+
+ // Enable automatic function calling by default.
+ var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
+
+ // Get function provider and find best functions for specified prompt.
+ var functionProvider = kernel.GetRequiredService();
+
+ const string Prompt = "Provide latest headlines";
+
+ var bestFunctions = await functionProvider.GetBestFunctionsAsync(CollectionName, Prompt, kernel.Plugins, numberOfBestFunctions: 1);
+
+ // If any found, update execution settings to share only selected functions.
+ if (bestFunctions.Count > 0)
+ {
+ bestFunctions.ForEach(function
+ => logger.LogInformation("Best function found: {PluginName}-{FunctionName}", function.PluginName, function.Name));
+
+ // Convert selected functions to OpenAI functions.
+ var openAIFunctions = bestFunctions.Select(function => function.Metadata.ToOpenAIFunction());
+
+ // Share only selected functions with AI.
+ executionSettings.ToolCallBehavior = ToolCallBehavior.EnableFunctions(openAIFunctions, autoInvoke: true);
+ }
+
+ // Get chat completion service and execute a request.
+ var chatCompletionService = kernel.GetRequiredService();
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage(Prompt);
+
+ var result = await chatCompletionService.GetChatMessageContentAsync(chatHistory, executionSettings, kernel);
+
+ Console.WriteLine(result);
+ Console.WriteLine(result.Metadata?["Usage"]?.AsJson()); // Just one function was shared with AI. Total tokens: ~150
+ }
+
+ ///
+ /// Filter which performs vector similarity search on imported functions in
+ /// to select the best ones to share with AI.
+ ///
+ private sealed class PluginSelectionFilter(
+ IFunctionProvider functionProvider,
+ ILogger logger,
+ string collectionName,
+ int numberOfBestFunctions) : IFunctionInvocationFilter
+ {
+ public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func next)
+ {
+ var request = GetRequestArgument(context.Arguments);
+
+ // Execute plugin selection logic for "InvokePrompt" function only, as main entry point.
+ if (context.Function.Name.Contains(nameof(KernelExtensions.InvokePromptAsync)) && !string.IsNullOrWhiteSpace(request))
+ {
+ // Get imported plugins in kernel.
+ var plugins = context.Kernel.Plugins;
+
+ // Find best functions for original request.
+ var bestFunctions = await functionProvider.GetBestFunctionsAsync(collectionName, request, plugins, numberOfBestFunctions);
+
+ // If any found, update execution settings and execute the request.
+ if (bestFunctions.Count > 0)
+ {
+ bestFunctions.ForEach(function
+ => logger.LogInformation("Best function found: {PluginName}-{FunctionName}", function.PluginName, function.Name));
+
+ var updatedExecutionSettings = GetExecutionSettings(context.Arguments, bestFunctions);
+
+ if (updatedExecutionSettings is not null)
+ {
+ // Update execution settings.
+ context.Arguments.ExecutionSettings = updatedExecutionSettings;
+
+ // Execute the request.
+ await next(context);
+
+ return;
+ }
+ }
+ }
+
+ // Otherwise, execute a request with default logic, where all plugins will be shared.
+ await next(context);
+ }
+
+ private static Dictionary? GetExecutionSettings(KernelArguments arguments, List functions)
+ {
+ var promptExecutionSettings = arguments.ExecutionSettings?[PromptExecutionSettings.DefaultServiceId];
+
+ if (promptExecutionSettings is not null && promptExecutionSettings is OpenAIPromptExecutionSettings openAIPromptExecutionSettings)
+ {
+ // Convert selected functions to OpenAI functions.
+ var openAIFunctions = functions.Select(function => function.Metadata.ToOpenAIFunction());
+
+ // Share only selected functions with AI.
+ openAIPromptExecutionSettings.ToolCallBehavior = ToolCallBehavior.EnableFunctions(openAIFunctions, autoInvoke: true);
+
+ return new() { [PromptExecutionSettings.DefaultServiceId] = openAIPromptExecutionSettings };
+ }
+
+ return null;
+ }
+
+ private static string? GetRequestArgument(KernelArguments arguments)
+ => arguments.TryGetValue("Request", out var requestObj) && requestObj is string request ? request : null;
+ }
+
+ #region Helper components
+
+ ///
+ /// Helper function key provider.
+ ///
+ public interface IFunctionKeyProvider
+ {
+ string GetFunctionKey(KernelFunction kernelFunction);
+ }
+
+ ///
+ /// Helper function provider to get best functions for specific request.
+ ///
+ public interface IFunctionProvider
+ {
+ Task> GetBestFunctionsAsync(
+ string collectionName,
+ string request,
+ KernelPluginCollection plugins,
+ int numberOfBestFunctions);
+ }
+
+ ///
+ /// Helper plugin store to save information about imported plugins in vector database.
+ ///
+ public interface IPluginStore
+ {
+ Task SaveAsync(string collectionName, KernelPluginCollection plugins);
+ }
+
+ public class FunctionKeyProvider : IFunctionKeyProvider
+ {
+ public string GetFunctionKey(KernelFunction kernelFunction)
+ {
+ return !string.IsNullOrWhiteSpace(kernelFunction.PluginName) ?
+ $"{kernelFunction.PluginName}-{kernelFunction.Name}" :
+ kernelFunction.Name;
+ }
+ }
+
+ public class FunctionProvider(
+ ITextEmbeddingGenerationService textEmbeddingGenerationService,
+ IMemoryStore memoryStore,
+ IFunctionKeyProvider functionKeyProvider) : IFunctionProvider
+ {
+ public async Task> GetBestFunctionsAsync(
+ string collectionName,
+ string request,
+ KernelPluginCollection plugins,
+ int numberOfBestFunctions)
+ {
+ // Generate embedding for original request.
+ var requestEmbedding = await textEmbeddingGenerationService.GenerateEmbeddingAsync(request);
+
+ // Find best functions to call for original request.
+ var memoryRecordKeys = await memoryStore
+ .GetNearestMatchesAsync(collectionName, requestEmbedding, limit: numberOfBestFunctions)
+ .Select(l => l.Item1.Key)
+ .ToListAsync();
+
+ return plugins
+ .SelectMany(plugin => plugin)
+ .Where(function => memoryRecordKeys.Contains(functionKeyProvider.GetFunctionKey(function)))
+ .ToList();
+ }
+ }
+
+ public class PluginStore(
+ ITextEmbeddingGenerationService textEmbeddingGenerationService,
+ IMemoryStore memoryStore,
+ IFunctionKeyProvider functionKeyProvider) : IPluginStore
+ {
+ public async Task SaveAsync(string collectionName, KernelPluginCollection plugins)
+ {
+ // Collect data about imported functions in kernel.
+ var memoryRecords = new List();
+ var functionsData = GetFunctionsData(plugins);
+
+ // Generate embedding for each function.
+ var embeddings = await textEmbeddingGenerationService
+ .GenerateEmbeddingsAsync(functionsData.Select(l => l.TextToVectorize).ToArray());
+
+ // Create memory record instances with function information and embedding.
+ for (var i = 0; i < functionsData.Count; i++)
+ {
+ var (function, textToVectorize) = functionsData[i];
+
+ memoryRecords.Add(MemoryRecord.LocalRecord(
+ id: functionKeyProvider.GetFunctionKey(function),
+ text: textToVectorize,
+ description: null,
+ embedding: embeddings[i]));
+ }
+
+ // Create collection and upsert all memory records for search.
+ // It's possible to do it only once and re-use the same functions for future requests.
+ await memoryStore.CreateCollectionAsync(collectionName);
+ await memoryStore.UpsertBatchAsync(collectionName, memoryRecords).ToListAsync();
+ }
+
+ private static List<(KernelFunction Function, string TextToVectorize)> GetFunctionsData(KernelPluginCollection plugins)
+ => plugins
+ .SelectMany(plugin => plugin)
+ .Select(function => (function, $"Plugin name: {function.PluginName}. Function name: {function.Name}. Description: {function.Description}"))
+ .ToList();
+ }
+
+ #endregion
+
+ #region Sample Plugins
+
+ private sealed class TimePlugin
+ {
+ [KernelFunction, Description("Provides the current date and time.")]
+ public string GetCurrentTime() => DateTime.Now.ToString("R");
+ }
+
+ private sealed class WeatherPlugin
+ {
+ [KernelFunction, Description("Provides weather information for various cities.")]
+ public string GetWeather(string cityName) => cityName switch
+ {
+ "Boston" => "61 and rainy",
+ "London" => "55 and cloudy",
+ "Miami" => "80 and sunny",
+ "Paris" => "60 and rainy",
+ "Tokyo" => "50 and sunny",
+ "Sydney" => "75 and sunny",
+ "Tel Aviv" => "80 and sunny",
+ _ => "No information",
+ };
+ }
+
+ private sealed class EmailPlugin(ILogger logger)
+ {
+ [KernelFunction, Description("Sends email to recipient with subject and body.")]
+ public void SendEmail(string from, string to, string subject, string body)
+ {
+ logger.LogInformation("Email has been sent successfully.");
+ }
+ }
+
+ private sealed class NewsPlugin
+ {
+ [KernelFunction, Description("Provides the latest news headlines.")]
+ public List GetLatestHeadlines() => new()
+ {
+ "Tourism Industry Sees Record Growth",
+ "Tech Company Releases New Product",
+ "Sports Team Wins Championship",
+ "New Study Reveals Health Benefits of Walking"
+ };
+ }
+
+ private sealed class CalendarPlugin
+ {
+ [KernelFunction, Description("Provides a list of upcoming events.")]
+ public List GetUpcomingEvents() => new()
+ {
+ "Meeting with Bob on June 22",
+ "Project deadline on June 30",
+ "Dentist appointment on July 5",
+ "Vacation starts on July 12"
+ };
+ }
+
+ #endregion
+}
diff --git a/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs b/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs
index f351f9af2636..e6c94622ddd6 100644
--- a/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs
+++ b/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs
@@ -11,6 +11,7 @@
namespace Plugins;
+[Obsolete("OpenAI plugins are deprecated and will be removed in a future version.")]
public class CreatePluginFromOpenAI_AzureKeyVault(ITestOutputHelper output) : BaseTest(output)
{
private const string SecretName = "Foo";
@@ -118,6 +119,7 @@ private async Task GetSecretFromAzureKeyVaultWithRetryAsync(Kernel kernel, Kerne
///
/// Provides authentication for HTTP requests to OpenAI using OAuth or verification tokens.
///
+[Obsolete("OpenAI plugins are deprecated and will be removed in a future version.")]
internal sealed class OpenAIAuthenticationProvider(Dictionary>? oAuthValues = null, Dictionary? credentials = null)
{
private readonly Dictionary> _oAuthValues = oAuthValues ?? [];
diff --git a/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Github.cs b/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Github.cs
index 5445f52b16c4..7d17658ad214 100644
--- a/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Github.cs
+++ b/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Github.cs
@@ -53,7 +53,7 @@ public async Task RunOpenAIPluginWithMetadataAsync()
WriteStringToStream(schemaStream, schema);
// Import an Open API plugin from a stream.
- var plugin = await kernel.CreatePluginFromOpenApiAsync("GithubVersionsApi", schemaStream, new OpenAIFunctionExecutionParameters(httpClient));
+ var plugin = await kernel.CreatePluginFromOpenApiAsync("GithubVersionsApi", schemaStream, new OpenApiFunctionExecutionParameters(httpClient));
// Get the function to be invoked and its metadata and extension properties.
var function = plugin["getVersions"];
diff --git a/dotnet/samples/Concepts/Plugins/OpenAIPlugins.cs b/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Klarna.cs
similarity index 84%
rename from dotnet/samples/Concepts/Plugins/OpenAIPlugins.cs
rename to dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Klarna.cs
index 77846b0d5290..40975f0df6ef 100644
--- a/dotnet/samples/Concepts/Plugins/OpenAIPlugins.cs
+++ b/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Klarna.cs
@@ -5,16 +5,16 @@
namespace Plugins;
-public class OpenAIPlugins(ITestOutputHelper output) : BaseTest(output)
+public class CreatePluginFromOpenApiSpec_Klarna(ITestOutputHelper output) : BaseTest(output)
{
///
- /// This sample shows how to invoke an OpenAI plugin.
+ /// This sample shows how to invoke an OpenApi plugin.
///
///
/// You must provide the plugin name and a URI to the Open API manifest before running this sample.
///
[Fact(Skip = "Run it only after filling the template below")]
- public async Task InvokeOpenAIPluginAsync()
+ public async Task InvokeOpenApiPluginAsync()
{
Kernel kernel = new();
@@ -22,7 +22,7 @@ public async Task InvokeOpenAIPluginAsync()
using HttpClient httpClient = new();
// Import an Open AI plugin via URI
- var plugin = await kernel.ImportPluginFromOpenAIAsync("", new Uri(""), new OpenAIFunctionExecutionParameters(httpClient));
+ var plugin = await kernel.ImportPluginFromOpenApiAsync("", new Uri(""), new OpenApiFunctionExecutionParameters(httpClient));
// Add arguments for required parameters, arguments for optional ones can be skipped.
var arguments = new KernelArguments { [""] = "" };
@@ -39,11 +39,11 @@ public async Task InvokeOpenAIPluginAsync()
/// This sample shows how to invoke the Klarna Get Products function as an OpenAPI plugin.
///
[Fact]
- public async Task InvokeKlarnaGetProductsAsOpenAPIPluginAsync()
+ public async Task InvokeKlarnaGetProductsAsOpenApiPluginAsync()
{
Kernel kernel = new();
- var plugin = await kernel.ImportPluginFromOpenAIAsync("Klarna", new Uri("https://www.klarna.com/.well-known/ai-plugin.json"));
+ var plugin = await kernel.ImportPluginFromOpenApiAsync("Klarna", new Uri("https://www.klarna.com/us/shopping/public/openai/v0/api-docs/"));
var arguments = new KernelArguments
{
@@ -70,7 +70,7 @@ public async Task InvokeKlarnaGetProductsAsOpenAPIPluginAsync()
/// The contains the , and .
///
[Fact]
- public async Task UseDelegatingHandlerWhenInvokingAnOpenAPIFunctionAsync()
+ public async Task UseDelegatingHandlerWhenInvokingAnOpenApiFunctionAsync()
{
using var httpHandler = new HttpClientHandler();
using var customHandler = new CustomHandler(httpHandler);
@@ -78,7 +78,7 @@ public async Task UseDelegatingHandlerWhenInvokingAnOpenAPIFunctionAsync()
Kernel kernel = new();
- var plugin = await kernel.ImportPluginFromOpenAIAsync("Klarna", new Uri("https://www.klarna.com/.well-known/ai-plugin.json"), new OpenAIFunctionExecutionParameters(httpClient));
+ var plugin = await kernel.ImportPluginFromOpenApiAsync("Klarna", new Uri("https://www.klarna.com/us/shopping/public/openai/v0/api-docs/"), new OpenApiFunctionExecutionParameters(httpClient));
var arguments = new KernelArguments
{
diff --git a/dotnet/samples/Concepts/README.md b/dotnet/samples/Concepts/README.md
index 7eaa2a8a7ae6..fea33c88822e 100644
--- a/dotnet/samples/Concepts/README.md
+++ b/dotnet/samples/Concepts/README.md
@@ -69,6 +69,7 @@ Down below you can find the code snippets that demonstrate the usage of many Sem
- [PromptRenderFiltering](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/PromptRenderFiltering.cs)
- [RetryWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/RetryWithFilters.cs)
- [PIIDetectionWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/PIIDetectionWithFilters.cs)
+- [TelemetryWithFilters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Filtering/TelemetryWithFilters.cs)
## Functions - Invoking [`Method`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromMethod.cs) or [`Prompt`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs) functions with [`Kernel`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/Kernel.cs)
@@ -78,6 +79,7 @@ Down below you can find the code snippets that demonstrate the usage of many Sem
- [MethodFunctions](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/MethodFunctions.cs)
- [MethodFunctions_Advanced](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/MethodFunctions_Advanced.cs)
- [MethodFunctions_Types](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/MethodFunctions_Types.cs)
+- [MethodFunctions_Yaml](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/MethodFunctions_Yaml.cs)
- [PromptFunctions_Inline](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/PromptFunctions_Inline.cs)
- [PromptFunctions_MultipleArguments](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Functions/PromptFunctions_MultipleArguments.cs)
@@ -103,6 +105,7 @@ Down below you can find the code snippets that demonstrate the usage of many Sem
## Optimization - Examples of different cost and performance optimization techniques
- [FrugalGPT](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Optimization/FrugalGPT.cs)
+- [PluginSelection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Optimization/PluginSelection.cs)
## Planners - Examples on using `Planners`
@@ -113,14 +116,14 @@ Down below you can find the code snippets that demonstrate the usage of many Sem
- [ApiManifestBasedPlugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/ApiManifestBasedPlugins.cs)
- [ConversationSummaryPlugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/ConversationSummaryPlugin.cs)
-- [CreatePluginFromOpenAI_AzureKeyVault](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs)
+- [CreatePluginFromOpenAI_AzureKeyVault](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenAI_AzureKeyVault.cs) (Deprecated)
- [CreatePluginFromOpenApiSpec_Github](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Github.cs)
- [CreatePluginFromOpenApiSpec_Jira](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Jira.cs)
+- [CreatePluginFromOpenApiSpec_Klarna](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CreatePluginFromOpenApiSpec_Klarna.cs)
- [CustomMutablePlugin](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/CustomMutablePlugin.cs)
- [DescribeAllPluginsAndFunctions](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/DescribeAllPluginsAndFunctions.cs)
- [GroundednessChecks](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/GroundednessChecks.cs)
- [ImportPluginFromGrpc](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/ImportPluginFromGrpc.cs)
-- [OpenAIPlugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Plugins/OpenAIPlugins.cs)
## PromptTemplates - Using [`Templates`](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/IPromptTemplate.cs) with parametrization for `Prompt` rendering
diff --git a/dotnet/samples/Demos/FunctionInvocationApproval/README.md b/dotnet/samples/Demos/FunctionInvocationApproval/README.md
new file mode 100644
index 000000000000..99ff202e45fd
--- /dev/null
+++ b/dotnet/samples/Demos/FunctionInvocationApproval/README.md
@@ -0,0 +1,44 @@
+# Function Invocation Approval
+
+This console application shows how to use function invocation filter (`IFunctionInvocationFilter`) to invoke a Kernel Function only if such operation was approved.
+If function invocation was rejected, the result will contain the reason why, so the LLM can respond appropriately.
+
+The application uses a sample plugin which builds software by following these development stages: collection of requirements, design, implementation, testing and deployment.
+
+Each step can be approved or rejected. Based on that, the LLM will decide how to proceed.
+
+## Configuring Secrets
+
+The example requires credentials to access OpenAI or Azure OpenAI.
+
+If you have set up those credentials as secrets within Secret Manager or through environment variables for other samples from the solution in which this project is found, they will be re-used.
+
+### To set your secrets with Secret Manager:
+
+```
+cd dotnet/samples/Demos/FunctionInvocationApproval
+
+dotnet user-secrets init
+
+dotnet user-secrets set "OpenAI:ChatModelId" "..."
+dotnet user-secrets set "OpenAI:ApiKey" "..."
+
+dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..."
+dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/"
+dotnet user-secrets set "AzureOpenAI:ApiKey" "..."
+```
+
+### To set your secrets with environment variables
+
+Use these names:
+
+```
+# OpenAI
+OpenAI__ChatModelId
+OpenAI__ApiKey
+
+# Azure OpenAI
+AzureOpenAI__ChatDeploymentName
+AzureOpenAI__Endpoint
+AzureOpenAI__ApiKey
+```
diff --git a/dotnet/samples/Demos/HomeAutomation/README.md b/dotnet/samples/Demos/HomeAutomation/README.md
index 09907e5363e5..aa5c33cec248 100644
--- a/dotnet/samples/Demos/HomeAutomation/README.md
+++ b/dotnet/samples/Demos/HomeAutomation/README.md
@@ -12,7 +12,7 @@ If you have set up those credentials as secrets within Secret Manager or through
### To set your secrets with Secret Manager:
```
-cd dotnet/samples/HouseAutomation
+cd dotnet/samples/Demos/HomeAutomation
dotnet user-secrets init
diff --git a/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs b/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs
index 7ecfb2c5348f..ddab79f032b0 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs
@@ -15,7 +15,7 @@ public class Step1_Agent(ITestOutputHelper output) : BaseTest(output)
private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound.";
[Fact]
- public async Task RunAsync()
+ public async Task UseSingleChatCompletionAgentAsync()
{
// Define the agent
ChatCompletionAgent agent =
@@ -27,7 +27,7 @@ public async Task RunAsync()
};
/// Create a chat for agent interaction. For more, .
- AgentGroupChat chat = new();
+ ChatHistory chat = [];
// Respond to user input
await InvokeAgentAsync("Fortune favors the bold.");
@@ -37,11 +37,11 @@ public async Task RunAsync()
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
+ chat.Add(new ChatMessageContent(AuthorRole.User, input));
Console.WriteLine($"# {AuthorRole.User}: '{input}'");
- await foreach (var content in chat.InvokeAsync(agent))
+ await foreach (ChatMessageContent content in agent.InvokeAsync(chat))
{
Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs
index 708fab321f04..61737de498be 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs
@@ -17,7 +17,7 @@ public class Step2_Plugins(ITestOutputHelper output) : BaseTest(output)
private const string HostInstructions = "Answer questions about the menu.";
[Fact]
- public async Task RunAsync()
+ public async Task UseChatCompletionWithPluginAgentAsync()
{
// Define the agent
ChatCompletionAgent agent =
@@ -34,7 +34,7 @@ public async Task RunAsync()
agent.Kernel.Plugins.Add(plugin);
/// Create a chat for agent interaction. For more, .
- AgentGroupChat chat = new();
+ ChatHistory chat = [];
// Respond to user input, invoking functions where appropriate.
await InvokeAgentAsync("Hello");
@@ -45,10 +45,10 @@ public async Task RunAsync()
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
+ chat.Add(new ChatMessageContent(AuthorRole.User, input));
Console.WriteLine($"# {AuthorRole.User}: '{input}'");
- await foreach (var content in chat.InvokeAsync(agent))
+ await foreach (var content in agent.InvokeAsync(chat))
{
Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs b/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs
index c539532ef52c..0c9c60f870a7 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs
@@ -34,7 +34,7 @@ Consider suggestions when refining an idea.
""";
[Fact]
- public async Task RunAsync()
+ public async Task UseAgentGroupChatWithTwoAgentsAsync()
{
// Define the agents
ChatCompletionAgent agentReviewer =
diff --git a/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs b/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs
index 06dfe0fcc4ed..cd99531ec27b 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs
@@ -33,7 +33,7 @@ Consider suggestions when refining an idea.
""";
[Fact]
- public async Task RunAsync()
+ public async Task UseKernelFunctionStrategiesWithAgentGroupChatAsync()
{
// Define the agents
ChatCompletionAgent agentReviewer =
diff --git a/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs b/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs
index e5ec480f8773..b1e83a202505 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs
@@ -28,7 +28,7 @@ Think step-by-step and rate the user input on creativity and expressivness from
""";
[Fact]
- public async Task RunAsync()
+ public async Task UseKernelFunctionStrategiesWithJsonResultAsync()
{
// Define the agents
ChatCompletionAgent agent =
diff --git a/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs b/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs
index c759053dbe1c..a7e3b9b41450 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs
@@ -30,7 +30,7 @@ Think step-by-step and rate the user input on creativity and expressivness from
""";
[Fact]
- public async Task RunAsync()
+ public async Task UseDependencyInjectionToCreateAgentAsync()
{
ServiceCollection serviceContainer = new();
diff --git a/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs b/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs
index 4b8b48c5ef87..4372d71e37f8 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs
@@ -37,7 +37,7 @@ Consider suggestions when refining an idea.
""";
[Fact]
- public async Task RunAsync()
+ public async Task UseLoggerFactoryWithAgentGroupChatAsync()
{
// Define the agents
ChatCompletionAgent agentReviewer =
@@ -46,6 +46,7 @@ public async Task RunAsync()
Instructions = ReviewerInstructions,
Name = ReviewerName,
Kernel = this.CreateKernelWithChatCompletion(),
+ LoggerFactory = this.LoggerFactory,
};
ChatCompletionAgent agentWriter =
@@ -54,6 +55,7 @@ public async Task RunAsync()
Instructions = CopyWriterInstructions,
Name = CopyWriterName,
Kernel = this.CreateKernelWithChatCompletion(),
+ LoggerFactory = this.LoggerFactory,
};
// Create a chat for agent interaction.
diff --git a/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs b/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs
index 32ce38da8b2f..09afcfc44826 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs
@@ -18,7 +18,7 @@ public class Step8_OpenAIAssistant(ITestOutputHelper output) : BaseTest(output)
private const string HostInstructions = "Answer questions about the menu.";
[Fact]
- public async Task RunAsync()
+ public async Task UseSingleOpenAIAssistantAgentAsync()
{
// Define the agent
OpenAIAssistantAgent agent =
@@ -37,7 +37,7 @@ await OpenAIAssistantAgent.CreateAsync(
agent.Kernel.Plugins.Add(plugin);
// Create a chat for agent interaction.
- var chat = new AgentGroupChat();
+ string threadId = await agent.CreateThreadAsync();
// Respond to user input
try
@@ -49,19 +49,23 @@ await OpenAIAssistantAgent.CreateAsync(
}
finally
{
+ await agent.DeleteThreadAsync(threadId);
await agent.DeleteAsync();
}
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
+ await agent.AddChatMessageAsync(threadId, new ChatMessageContent(AuthorRole.User, input));
Console.WriteLine($"# {AuthorRole.User}: '{input}'");
- await foreach (var content in chat.InvokeAsync(agent))
+ await foreach (var content in agent.InvokeAsync(threadId))
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ if (content.Role != AuthorRole.Tool)
+ {
+ Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ }
}
}
}
diff --git a/dotnet/src/Agents/Abstractions/Agent.cs b/dotnet/src/Agents/Abstractions/Agent.cs
index 4ebe3d1416cf..8af2de3b0869 100644
--- a/dotnet/src/Agents/Abstractions/Agent.cs
+++ b/dotnet/src/Agents/Abstractions/Agent.cs
@@ -4,6 +4,7 @@
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
namespace Microsoft.SemanticKernel.Agents;
@@ -36,6 +37,16 @@ public abstract class Agent
///
public string? Name { get; init; }
+ ///
+ /// A for this .
+ ///
+ public ILoggerFactory LoggerFactory { get; init; } = NullLoggerFactory.Instance;
+
+ ///
+ /// The associated with this .
+ ///
+ protected ILogger Logger => this._logger ??= this.LoggerFactory.CreateLogger(this.GetType());
+
///
/// Set of keys to establish channel affinity. Minimum expected key-set:
///
@@ -53,12 +64,13 @@ public abstract class Agent
///
/// Produce the an appropriate for the agent type.
///
- /// An agent specific logger.
/// The to monitor for cancellation requests. The default is .
/// An appropriate for the agent type.
///
/// Every agent conversation, or , will establish one or more
/// objects according to the specific type.
///
- protected internal abstract Task CreateChannelAsync(ILogger logger, CancellationToken cancellationToken);
+ protected internal abstract Task CreateChannelAsync(CancellationToken cancellationToken);
+
+ private ILogger? _logger;
}
diff --git a/dotnet/src/Agents/Abstractions/AgentChat.cs b/dotnet/src/Agents/Abstractions/AgentChat.cs
index 26b51928c362..7e7dea00a805 100644
--- a/dotnet/src/Agents/Abstractions/AgentChat.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChat.cs
@@ -256,10 +256,7 @@ async Task GetOrCreateChannelAsync()
{
this.Logger.LogDebug("[{MethodName}] Creating channel for {AgentType}: {AgentId}", nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
- // Creating an agent-typed logger for CreateChannelAsync
- channel = await agent.CreateChannelAsync(this.LoggerFactory.CreateLogger(agent.GetType()), cancellationToken).ConfigureAwait(false);
- // Creating an channel-typed logger for the channel
- channel.Logger = this.LoggerFactory.CreateLogger(channel.GetType());
+ channel = await agent.CreateChannelAsync(cancellationToken).ConfigureAwait(false);
this._agentChannels.Add(channelKey, channel);
diff --git a/dotnet/src/Agents/Abstractions/AggregatorAgent.cs b/dotnet/src/Agents/Abstractions/AggregatorAgent.cs
index c236cd7a565a..00964fdc9e57 100644
--- a/dotnet/src/Agents/Abstractions/AggregatorAgent.cs
+++ b/dotnet/src/Agents/Abstractions/AggregatorAgent.cs
@@ -44,14 +44,14 @@ protected internal override IEnumerable GetChannelKeys()
}
///
- protected internal override Task CreateChannelAsync(ILogger logger, CancellationToken cancellationToken)
+ protected internal override Task CreateChannelAsync(CancellationToken cancellationToken)
{
- logger.LogDebug("[{MethodName}] Creating channel {ChannelType}", nameof(CreateChannelAsync), nameof(AggregatorChannel));
+ this.Logger.LogDebug("[{MethodName}] Creating channel {ChannelType}", nameof(CreateChannelAsync), nameof(AggregatorChannel));
AgentChat chat = chatProvider.Invoke();
AggregatorChannel channel = new(chat);
- logger.LogInformation("[{MethodName}] Created channel {ChannelType} ({ChannelMode}) with: {AgentChatType}", nameof(CreateChannelAsync), nameof(AggregatorChannel), this.Mode, chat.GetType());
+ this.Logger.LogInformation("[{MethodName}] Created channel {ChannelType} ({ChannelMode}) with: {AgentChatType}", nameof(CreateChannelAsync), nameof(AggregatorChannel), this.Mode, chat.GetType());
return Task.FromResult(channel);
}
diff --git a/dotnet/src/Agents/Abstractions/ChatHistoryChannel.cs b/dotnet/src/Agents/Abstractions/ChatHistoryChannel.cs
index 281529bffd8e..2bb5616ff959 100644
--- a/dotnet/src/Agents/Abstractions/ChatHistoryChannel.cs
+++ b/dotnet/src/Agents/Abstractions/ChatHistoryChannel.cs
@@ -25,7 +25,7 @@ protected internal sealed override async IAsyncEnumerable In
throw new KernelException($"Invalid channel binding for agent: {agent.Id} ({agent.GetType().FullName})");
}
- await foreach (var message in historyHandler.InvokeAsync(this._history, this.Logger, cancellationToken).ConfigureAwait(false))
+ await foreach (ChatMessageContent message in historyHandler.InvokeAsync(this._history, cancellationToken).ConfigureAwait(false))
{
this._history.Add(message);
diff --git a/dotnet/src/Agents/Abstractions/ChatHistoryKernelAgent.cs b/dotnet/src/Agents/Abstractions/ChatHistoryKernelAgent.cs
index ee86a7af770e..3de87da3de06 100644
--- a/dotnet/src/Agents/Abstractions/ChatHistoryKernelAgent.cs
+++ b/dotnet/src/Agents/Abstractions/ChatHistoryKernelAgent.cs
@@ -3,6 +3,7 @@
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.ChatCompletion;
namespace Microsoft.SemanticKernel.Agents;
@@ -18,14 +19,24 @@ protected internal sealed override IEnumerable GetChannelKeys()
}
///
- protected internal sealed override Task CreateChannelAsync(ILogger logger, CancellationToken cancellationToken)
+ protected internal sealed override Task CreateChannelAsync(CancellationToken cancellationToken)
{
- return Task.FromResult(new ChatHistoryChannel());
+ ChatHistoryChannel channel =
+ new()
+ {
+ Logger = this.LoggerFactory.CreateLogger()
+ };
+
+ return Task.FromResult(channel);
}
///
public abstract IAsyncEnumerable InvokeAsync(
- IReadOnlyList history,
- ILogger logger,
+ ChatHistory history,
+ CancellationToken cancellationToken = default);
+
+ ///
+ public abstract IAsyncEnumerable InvokeStreamingAsync(
+ ChatHistory history,
CancellationToken cancellationToken = default);
}
diff --git a/dotnet/src/Agents/Abstractions/IChatHistoryHandler.cs b/dotnet/src/Agents/Abstractions/IChatHistoryHandler.cs
index f377d38ba58e..8b7dab748c81 100644
--- a/dotnet/src/Agents/Abstractions/IChatHistoryHandler.cs
+++ b/dotnet/src/Agents/Abstractions/IChatHistoryHandler.cs
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Collections.Generic;
using System.Threading;
-using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.ChatCompletion;
namespace Microsoft.SemanticKernel.Agents;
@@ -11,14 +11,22 @@ namespace Microsoft.SemanticKernel.Agents;
public interface IChatHistoryHandler
{
///
- /// Entry point for calling into an agent from a a .
+ /// Entry point for calling into an agent from a .
///
/// The chat history at the point the channel is created.
- /// The logger associated with the
/// The to monitor for cancellation requests. The default is .
/// Asynchronous enumeration of messages.
IAsyncEnumerable InvokeAsync(
- IReadOnlyList history,
- ILogger logger,
+ ChatHistory history,
+ CancellationToken cancellationToken = default);
+
+ ///
+ /// Entry point for calling into an agent from a for streaming content.
+ ///
+ /// The chat history at the point the channel is created.
+ /// The to monitor for cancellation requests. The default is .
+ /// Asynchronous enumeration of streaming content.
+ public abstract IAsyncEnumerable InvokeStreamingAsync(
+ ChatHistory history,
CancellationToken cancellationToken = default);
}
diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
index e8f9378e8a39..b84d29494b8e 100644
--- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs
+++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
+using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel.ChatCompletion;
@@ -23,22 +24,16 @@ public sealed class ChatCompletionAgent : ChatHistoryKernelAgent
///
public override async IAsyncEnumerable InvokeAsync(
- IReadOnlyList history,
- ILogger logger,
+ ChatHistory history,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
- var chatCompletionService = this.Kernel.GetRequiredService();
+ IChatCompletionService chatCompletionService = this.Kernel.GetRequiredService();
- ChatHistory chat = [];
- if (!string.IsNullOrWhiteSpace(this.Instructions))
- {
- chat.Add(new ChatMessageContent(AuthorRole.System, this.Instructions) { AuthorName = this.Name });
- }
- chat.AddRange(history);
+ ChatHistory chat = this.SetupAgentChatHistory(history);
int messageCount = chat.Count;
- logger.LogDebug("[{MethodName}] Invoking {ServiceType}.", nameof(InvokeAsync), chatCompletionService.GetType());
+ this.Logger.LogDebug("[{MethodName}] Invoking {ServiceType}.", nameof(InvokeAsync), chatCompletionService.GetType());
IReadOnlyList messages =
await chatCompletionService.GetChatMessageContentsAsync(
@@ -47,9 +42,9 @@ await chatCompletionService.GetChatMessageContentsAsync(
this.Kernel,
cancellationToken).ConfigureAwait(false);
- if (logger.IsEnabled(LogLevel.Information)) // Avoid boxing if not enabled
+ if (this.Logger.IsEnabled(LogLevel.Information)) // Avoid boxing if not enabled
{
- logger.LogInformation("[{MethodName}] Invoked {ServiceType} with message count: {MessageCount}.", nameof(InvokeAsync), chatCompletionService.GetType(), messages.Count);
+ this.Logger.LogInformation("[{MethodName}] Invoked {ServiceType} with message count: {MessageCount}.", nameof(InvokeAsync), chatCompletionService.GetType(), messages.Count);
}
// Capture mutated messages related function calling / tools
@@ -59,7 +54,7 @@ await chatCompletionService.GetChatMessageContentsAsync(
message.AuthorName = this.Name;
- yield return message;
+ history.Add(message);
}
foreach (ChatMessageContent message in messages ?? [])
@@ -70,4 +65,62 @@ await chatCompletionService.GetChatMessageContentsAsync(
yield return message;
}
}
+
+ ///
+ public override async IAsyncEnumerable InvokeStreamingAsync(
+ ChatHistory history,
+ [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ IChatCompletionService chatCompletionService = this.Kernel.GetRequiredService();
+
+ ChatHistory chat = this.SetupAgentChatHistory(history);
+
+ int messageCount = chat.Count;
+
+ this.Logger.LogDebug("[{MethodName}] Invoking {ServiceType}.", nameof(InvokeAsync), chatCompletionService.GetType());
+
+ IAsyncEnumerable messages =
+ chatCompletionService.GetStreamingChatMessageContentsAsync(
+ chat,
+ this.ExecutionSettings,
+ this.Kernel,
+ cancellationToken);
+
+ if (this.Logger.IsEnabled(LogLevel.Information))
+ {
+ this.Logger.LogInformation("[{MethodName}] Invoked {ServiceType} with streaming messages.", nameof(InvokeAsync), chatCompletionService.GetType());
+ }
+
+ // Capture mutated messages related function calling / tools
+ for (int messageIndex = messageCount; messageIndex < chat.Count; messageIndex++)
+ {
+ ChatMessageContent message = chat[messageIndex];
+
+ message.AuthorName = this.Name;
+
+ history.Add(message);
+ }
+
+ await foreach (StreamingChatMessageContent message in messages.ConfigureAwait(false))
+ {
+ // TODO: MESSAGE SOURCE - ISSUE #5731
+ message.AuthorName = this.Name;
+
+ yield return message;
+ }
+ }
+
+ private ChatHistory SetupAgentChatHistory(IReadOnlyList history)
+ {
+ ChatHistory chat = [];
+
+ if (!string.IsNullOrWhiteSpace(this.Instructions))
+ {
+ chat.Add(new ChatMessageContent(AuthorRole.System, this.Instructions) { AuthorName = this.Name });
+ }
+
+ chat.AddRange(history);
+
+ return chat;
+ }
}
diff --git a/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs
new file mode 100644
index 000000000000..37649844a230
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs
@@ -0,0 +1,525 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Linq;
+using System.Net;
+using System.Runtime.CompilerServices;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure;
+using Azure.AI.OpenAI.Assistants;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Actions associated with an OpenAI Assistant thread.
+///
+internal static class AssistantThreadActions
+{
+ /*AssistantsClient client, string threadId, OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration*/
+ private const string FunctionDelimiter = "-";
+
+ private static readonly HashSet s_messageRoles =
+ [
+ AuthorRole.User,
+ AuthorRole.Assistant,
+ ];
+
+ private static readonly HashSet s_pollingStatuses =
+ [
+ RunStatus.Queued,
+ RunStatus.InProgress,
+ RunStatus.Cancelling,
+ ];
+
+ private static readonly HashSet s_terminalStatuses =
+ [
+ RunStatus.Expired,
+ RunStatus.Failed,
+ RunStatus.Cancelled,
+ ];
+
+ ///
+ /// Create a message in the specified thread.
+ ///
+ /// The assistant client
+ /// The thread identifier
+ /// The message to add
+ /// The to monitor for cancellation requests. The default is .
+ /// if a system message is present, without taking any other action
+ public static async Task CreateMessageAsync(AssistantsClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken)
+ {
+ if (!s_messageRoles.Contains(message.Role))
+ {
+ throw new KernelException($"Invalid message role: {message.Role}");
+ }
+
+ if (string.IsNullOrWhiteSpace(message.Content))
+ {
+ return;
+ }
+
+ await client.CreateMessageAsync(
+ threadId,
+ message.Role.ToMessageRole(),
+ message.Content,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+ }
+
+ ///
+ /// Retrieves the thread messages.
+ ///
+ /// The assistant client
+ /// The thread identifier
+ /// The to monitor for cancellation requests. The default is .
+ /// Asynchronous enumeration of messages.
+ public static async IAsyncEnumerable GetMessagesAsync(AssistantsClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ Dictionary agentNames = []; // Cache agent names by their identifier
+
+ PageableList messages;
+
+ string? lastId = null;
+ do
+ {
+ messages = await client.GetMessagesAsync(threadId, limit: 100, ListSortOrder.Descending, after: lastId, null, cancellationToken).ConfigureAwait(false);
+ foreach (ThreadMessage message in messages)
+ {
+ AuthorRole role = new(message.Role.ToString());
+
+ string? assistantName = null;
+ if (!string.IsNullOrWhiteSpace(message.AssistantId) &&
+ !agentNames.TryGetValue(message.AssistantId, out assistantName))
+ {
+ Assistant assistant = await client.GetAssistantAsync(message.AssistantId, cancellationToken).ConfigureAwait(false);
+ if (!string.IsNullOrWhiteSpace(assistant.Name))
+ {
+ agentNames.Add(assistant.Id, assistant.Name);
+ }
+ }
+
+ assistantName ??= message.AssistantId;
+
+ foreach (MessageContent item in message.ContentItems)
+ {
+ ChatMessageContent? content = null;
+
+ if (item is MessageTextContent contentMessage)
+ {
+ content = GenerateTextMessageContent(assistantName, role, contentMessage);
+ }
+ else if (item is MessageImageFileContent contentImage)
+ {
+ content = GenerateImageFileContent(assistantName, role, contentImage);
+ }
+
+ if (content is not null)
+ {
+ yield return content;
+ }
+ }
+
+ lastId = message.Id;
+ }
+ }
+ while (messages.HasMore);
+ }
+
+ ///
+ /// Invoke the assistant on the specified thread.
+ ///
+ /// The assistant agent to interact with the thread.
+ /// The assistant client
+ /// The thread identifier
+ /// Config to utilize when polling for run state.
+ /// The logger to utilize (might be agent or channel scoped)
+ /// The to monitor for cancellation requests. The default is .
+ /// Asynchronous enumeration of messages.
+ public static async IAsyncEnumerable InvokeAsync(
+ OpenAIAssistantAgent agent,
+ AssistantsClient client,
+ string threadId,
+ OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration,
+ ILogger logger,
+ [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ if (agent.IsDeleted)
+ {
+ throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}.");
+ }
+
+ ToolDefinition[]? tools = [.. agent.Tools, .. agent.Kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name, FunctionDelimiter)))];
+
+ logger.LogDebug("[{MethodName}] Creating run for agent/thread: {AgentId}/{ThreadId}", nameof(InvokeAsync), agent.Id, threadId);
+
+ CreateRunOptions options =
+ new(agent.Id)
+ {
+ OverrideInstructions = agent.Instructions,
+ OverrideTools = tools,
+ };
+
+ // Create run
+ ThreadRun run = await client.CreateRunAsync(threadId, options, cancellationToken).ConfigureAwait(false);
+
+ logger.LogInformation("[{MethodName}] Created run: {RunId}", nameof(InvokeAsync), run.Id);
+
+ // Evaluate status and process steps and messages, as encountered.
+ HashSet processedStepIds = [];
+ Dictionary functionSteps = [];
+
+ do
+ {
+ // Poll run and steps until actionable
+ PageableList steps = await PollRunStatusAsync().ConfigureAwait(false);
+
+ // Is in terminal state?
+ if (s_terminalStatuses.Contains(run.Status))
+ {
+ throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}");
+ }
+
+ // Is tool action required?
+ if (run.Status == RunStatus.RequiresAction)
+ {
+ logger.LogDebug("[{MethodName}] Processing run steps: {RunId}", nameof(InvokeAsync), run.Id);
+
+ // Execute functions in parallel and post results at once.
+ FunctionCallContent[] activeFunctionSteps = steps.Data.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
+ if (activeFunctionSteps.Length > 0)
+ {
+ // Emit function-call content
+ yield return GenerateFunctionCallContent(agent.GetName(), activeFunctionSteps);
+
+ // Invoke functions for each tool-step
+ IEnumerable> functionResultTasks = ExecuteFunctionSteps(agent, activeFunctionSteps, cancellationToken);
+
+ // Block for function results
+ FunctionResultContent[] functionResults = await Task.WhenAll(functionResultTasks).ConfigureAwait(false);
+
+ // Process tool output
+ ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
+
+ await client.SubmitToolOutputsToRunAsync(run, toolOutputs, cancellationToken).ConfigureAwait(false);
+ }
+
+ if (logger.IsEnabled(LogLevel.Information)) // Avoid boxing if not enabled
+ {
+ logger.LogInformation("[{MethodName}] Processed #{MessageCount} run steps: {RunId}", nameof(InvokeAsync), activeFunctionSteps.Length, run.Id);
+ }
+ }
+
+ // Enumerate completed messages
+ logger.LogDebug("[{MethodName}] Processing run messages: {RunId}", nameof(InvokeAsync), run.Id);
+
+ IEnumerable completedStepsToProcess =
+ steps
+ .Where(s => s.CompletedAt.HasValue && !processedStepIds.Contains(s.Id))
+ .OrderBy(s => s.CreatedAt);
+
+ int messageCount = 0;
+ foreach (RunStep completedStep in completedStepsToProcess)
+ {
+ if (completedStep.Type.Equals(RunStepType.ToolCalls))
+ {
+ RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)completedStep.StepDetails;
+
+ foreach (RunStepToolCall toolCall in toolCallDetails.ToolCalls)
+ {
+ ChatMessageContent? content = null;
+
+ // Process code-interpreter content
+ if (toolCall is RunStepCodeInterpreterToolCall toolCodeInterpreter)
+ {
+ content = GenerateCodeInterpreterContent(agent.GetName(), toolCodeInterpreter);
+ }
+ // Process function result content
+ else if (toolCall is RunStepFunctionToolCall toolFunction)
+ {
+ FunctionCallContent functionStep = functionSteps[toolFunction.Id]; // Function step always captured on invocation
+ content = GenerateFunctionResultContent(agent.GetName(), functionStep, toolFunction.Output);
+ }
+
+ if (content is not null)
+ {
+ ++messageCount;
+
+ yield return content;
+ }
+ }
+ }
+ else if (completedStep.Type.Equals(RunStepType.MessageCreation))
+ {
+ RunStepMessageCreationDetails messageCreationDetails = (RunStepMessageCreationDetails)completedStep.StepDetails;
+
+ // Retrieve the message
+ ThreadMessage? message = await RetrieveMessageAsync(messageCreationDetails, cancellationToken).ConfigureAwait(false);
+
+ if (message is not null)
+ {
+ AuthorRole role = new(message.Role.ToString());
+
+ foreach (MessageContent itemContent in message.ContentItems)
+ {
+ ChatMessageContent? content = null;
+
+ // Process text content
+ if (itemContent is MessageTextContent contentMessage)
+ {
+ content = GenerateTextMessageContent(agent.GetName(), role, contentMessage);
+ }
+ // Process image content
+ else if (itemContent is MessageImageFileContent contentImage)
+ {
+ content = GenerateImageFileContent(agent.GetName(), role, contentImage);
+ }
+
+ if (content is not null)
+ {
+ ++messageCount;
+
+ yield return content;
+ }
+ }
+ }
+ }
+
+ processedStepIds.Add(completedStep.Id);
+ }
+
+ if (logger.IsEnabled(LogLevel.Information)) // Avoid boxing if not enabled
+ {
+ logger.LogInformation("[{MethodName}] Processed #{MessageCount} run messages: {RunId}", nameof(InvokeAsync), messageCount, run.Id);
+ }
+ }
+ while (RunStatus.Completed != run.Status);
+
+ logger.LogInformation("[{MethodName}] Completed run: {RunId}", nameof(InvokeAsync), run.Id);
+
+ // Local function to assist in run polling (participates in method closure).
+ async Task> PollRunStatusAsync()
+ {
+ logger.LogInformation("[{MethodName}] Polling run status: {RunId}", nameof(PollRunStatusAsync), run.Id);
+
+ int count = 0;
+
+ do
+ {
+ // Reduce polling frequency after a couple attempts
+ await Task.Delay(count >= 2 ? pollingConfiguration.RunPollingInterval : pollingConfiguration.RunPollingBackoff, cancellationToken).ConfigureAwait(false);
+ ++count;
+
+#pragma warning disable CA1031 // Do not catch general exception types
+ try
+ {
+ run = await client.GetRunAsync(threadId, run.Id, cancellationToken).ConfigureAwait(false);
+ }
+ catch
+ {
+ // Retry anyway..
+ }
+#pragma warning restore CA1031 // Do not catch general exception types
+ }
+ while (s_pollingStatuses.Contains(run.Status));
+
+ logger.LogInformation("[{MethodName}] Run status is {RunStatus}: {RunId}", nameof(PollRunStatusAsync), run.Status, run.Id);
+
+ return await client.GetRunStepsAsync(run, cancellationToken: cancellationToken).ConfigureAwait(false);
+ }
+
+ // Local function to capture kernel function state for further processing (participates in method closure).
+ IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, RunStep step)
+ {
+ if (step.Status == RunStepStatus.InProgress && step.StepDetails is RunStepToolCallDetails callDetails)
+ {
+ foreach (RunStepFunctionToolCall toolCall in callDetails.ToolCalls.OfType())
+ {
+ var nameParts = FunctionName.Parse(toolCall.Name, FunctionDelimiter);
+
+ KernelArguments functionArguments = [];
+ if (!string.IsNullOrWhiteSpace(toolCall.Arguments))
+ {
+ Dictionary arguments = JsonSerializer.Deserialize>(toolCall.Arguments)!;
+ foreach (var argumentKvp in arguments)
+ {
+ functionArguments[argumentKvp.Key] = argumentKvp.Value.ToString();
+ }
+ }
+
+ var content = new FunctionCallContent(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments);
+
+ functionSteps.Add(toolCall.Id, content);
+
+ yield return content;
+ }
+ }
+ }
+
+ async Task RetrieveMessageAsync(RunStepMessageCreationDetails detail, CancellationToken cancellationToken)
+ {
+ ThreadMessage? message = null;
+
+ bool retry = false;
+ int count = 0;
+ do
+ {
+ try
+ {
+ message = await client.GetMessageAsync(threadId, detail.MessageCreation.MessageId, cancellationToken).ConfigureAwait(false);
+ }
+ catch (RequestFailedException exception)
+ {
+ // Step has provided the message-id. Retry if NotFound/404 is returned.
+ // Extremely rarely there might be a synchronization issue between the
+ // assistant response and message-service.
+ retry = exception.Status == (int)HttpStatusCode.NotFound && count < 3;
+ }
+
+ if (retry)
+ {
+ await Task.Delay(pollingConfiguration.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false);
+ }
+
+ ++count;
+ }
+ while (retry);
+
+ return message;
+ }
+ }
+
+ private static AnnotationContent GenerateAnnotationContent(MessageTextAnnotation annotation)
+ {
+ string? fileId = null;
+ if (annotation is MessageTextFileCitationAnnotation citationAnnotation)
+ {
+ fileId = citationAnnotation.FileId;
+ }
+ else if (annotation is MessageTextFilePathAnnotation pathAnnotation)
+ {
+ fileId = pathAnnotation.FileId;
+ }
+
+ return
+ new()
+ {
+ Quote = annotation.Text,
+ StartIndex = annotation.StartIndex,
+ EndIndex = annotation.EndIndex,
+ FileId = fileId,
+ };
+ }
+
+ private static ChatMessageContent GenerateImageFileContent(string agentName, AuthorRole role, MessageImageFileContent contentImage)
+ {
+ return
+ new ChatMessageContent(
+ role,
+ [
+ new FileReferenceContent(contentImage.FileId)
+ ])
+ {
+ AuthorName = agentName,
+ };
+ }
+
+ private static ChatMessageContent? GenerateTextMessageContent(string agentName, AuthorRole role, MessageTextContent contentMessage)
+ {
+ ChatMessageContent? messageContent = null;
+
+ string textContent = contentMessage.Text.Trim();
+
+ if (!string.IsNullOrWhiteSpace(textContent))
+ {
+ messageContent =
+ new(role, textContent)
+ {
+ AuthorName = agentName
+ };
+
+ foreach (MessageTextAnnotation annotation in contentMessage.Annotations)
+ {
+ messageContent.Items.Add(GenerateAnnotationContent(annotation));
+ }
+ }
+
+ return messageContent;
+ }
+
+ private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, RunStepCodeInterpreterToolCall contentCodeInterpreter)
+ {
+ return
+ new ChatMessageContent(
+ AuthorRole.Tool,
+ [
+ new TextContent(contentCodeInterpreter.Input)
+ ])
+ {
+ AuthorName = agentName,
+ };
+ }
+
+ private static ChatMessageContent GenerateFunctionCallContent(string agentName, FunctionCallContent[] functionSteps)
+ {
+ ChatMessageContent functionCallContent = new(AuthorRole.Tool, content: null)
+ {
+ AuthorName = agentName
+ };
+
+ functionCallContent.Items.AddRange(functionSteps);
+
+ return functionCallContent;
+ }
+
+ private static ChatMessageContent GenerateFunctionResultContent(string agentName, FunctionCallContent functionStep, string result)
+ {
+ ChatMessageContent functionCallContent = new(AuthorRole.Tool, content: null)
+ {
+ AuthorName = agentName
+ };
+
+ functionCallContent.Items.Add(
+ new FunctionResultContent(
+ functionStep.FunctionName,
+ functionStep.PluginName,
+ functionStep.Id,
+ result));
+
+ return functionCallContent;
+ }
+
+ private static Task[] ExecuteFunctionSteps(OpenAIAssistantAgent agent, FunctionCallContent[] functionSteps, CancellationToken cancellationToken)
+ {
+ Task[] functionTasks = new Task[functionSteps.Length];
+
+ for (int index = 0; index < functionSteps.Length; ++index)
+ {
+ functionTasks[index] = functionSteps[index].InvokeAsync(agent.Kernel, cancellationToken);
+ }
+
+ return functionTasks;
+ }
+
+ private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults)
+ {
+ ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length];
+
+ for (int index = 0; index < functionResults.Length; ++index)
+ {
+ FunctionResultContent functionResult = functionResults[index];
+
+ object resultValue = functionResult.Result ?? string.Empty;
+
+ if (resultValue is not string textResult)
+ {
+ textResult = JsonSerializer.Serialize(resultValue);
+ }
+
+ toolOutputs[index] = new ToolOutput(functionResult.CallId, textResult!);
+ }
+
+ return toolOutputs;
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
index ca016a5d97cb..b46cdb013c18 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -162,15 +162,91 @@ public static async Task RetrieveAsync(
};
}
- ///
- public async Task DeleteAsync(CancellationToken cancellationToken = default)
+ ///
+ /// Create a new assistant thread.
+ ///
+ /// The to monitor for cancellation requests. The default is .
+ /// The thread identifier
+ public async Task CreateThreadAsync(CancellationToken cancellationToken = default)
{
- if (this.IsDeleted)
+ AssistantThread thread = await this._client.CreateThreadAsync(cancellationToken).ConfigureAwait(false);
+
+ return thread.Id;
+ }
+
+ ///
+ /// Delete an existing assistant thread.
+ ///
+ /// The thread identifier
+ /// The to monitor for cancellation requests. The default is .
+ /// True if the thread was deleted
+ public async Task DeleteThreadAsync(
+ string threadId,
+ CancellationToken cancellationToken = default)
+ {
+ // Validate input
+ Verify.NotNullOrWhiteSpace(threadId, nameof(threadId));
+
+ return await this._client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
+ }
+
+ ///
+ /// Adds a message to the specified thread.
+ ///
+ /// The thread identifier
+ /// A non-system message to append to the conversation.
+ /// The to monitor for cancellation requests. The default is .
+ public Task AddChatMessageAsync(string threadId, ChatMessageContent message, CancellationToken cancellationToken = default)
+ {
+ this.ThrowIfDeleted();
+
+ return AssistantThreadActions.CreateMessageAsync(this._client, threadId, message, cancellationToken);
+ }
+
+ ///
+ /// Gets messages for a specified thread.
+ ///
+ /// The thread identifier
+ /// The to monitor for cancellation requests. The default is .
+ /// Asynchronous enumeration of messages.
+ public IAsyncEnumerable GetThreadMessagesAsync(string threadId, CancellationToken cancellationToken = default)
+ {
+ this.ThrowIfDeleted();
+
+ return AssistantThreadActions.GetMessagesAsync(this._client, threadId, cancellationToken);
+ }
+
+ ///
+ /// Delete the assistant definition.
+ ///
+ ///
+ /// True if assistant definition has been deleted
+ ///
+ /// Assistant based agent will not be usable after deletion.
+ ///
+ public async Task DeleteAsync(CancellationToken cancellationToken = default)
+ {
+ if (!this.IsDeleted)
{
- return;
+ this.IsDeleted = (await this._client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false)).Value;
}
- this.IsDeleted = (await this._client.DeleteAssistantAsync(this.Id, cancellationToken).ConfigureAwait(false)).Value;
+ return this.IsDeleted;
+ }
+
+ ///
+ /// Invoke the assistant on the specified thread.
+ ///
+ /// The thread identifier
+ /// The to monitor for cancellation requests. The default is .
+ /// Asynchronous enumeration of messages.
+ public IAsyncEnumerable InvokeAsync(
+ string threadId,
+ CancellationToken cancellationToken = default)
+ {
+ this.ThrowIfDeleted();
+
+ return AssistantThreadActions.InvokeAsync(this, this._client, threadId, this._config.Polling, this.Logger, cancellationToken);
}
///
@@ -204,15 +280,27 @@ protected override IEnumerable GetChannelKeys()
}
///
- protected override async Task CreateChannelAsync(ILogger logger, CancellationToken cancellationToken)
+ protected override async Task CreateChannelAsync(CancellationToken cancellationToken)
{
- logger.LogDebug("[{MethodName}] Creating assistant thread", nameof(CreateChannelAsync));
+ this.Logger.LogDebug("[{MethodName}] Creating assistant thread", nameof(CreateChannelAsync));
AssistantThread thread = await this._client.CreateThreadAsync(cancellationToken).ConfigureAwait(false);
- logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), thread.Id);
+ this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), thread.Id);
- return new OpenAIAssistantChannel(this._client, thread.Id, this._config.Polling);
+ return
+ new OpenAIAssistantChannel(this._client, thread.Id, this._config.Polling)
+ {
+ Logger = this.LoggerFactory.CreateLogger()
+ };
+ }
+
+ internal void ThrowIfDeleted()
+ {
+ if (this.IsDeleted)
+ {
+ throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {this.Id}.");
+ }
}
///
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
index 0d8b20b5b931..b84ef800ebd4 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
@@ -1,15 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Collections.Generic;
-using System.Linq;
-using System.Net;
-using System.Runtime.CompilerServices;
-using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
-using Azure;
using Azure.AI.OpenAI.Assistants;
-using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.ChatCompletion;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -19,485 +12,31 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
internal sealed class OpenAIAssistantChannel(AssistantsClient client, string threadId, OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration)
: AgentChannel
{
- private const string FunctionDelimiter = "-";
-
- private static readonly HashSet s_pollingStatuses =
- [
- RunStatus.Queued,
- RunStatus.InProgress,
- RunStatus.Cancelling,
- ];
-
- private static readonly HashSet s_terminalStatuses =
- [
- RunStatus.Expired,
- RunStatus.Failed,
- RunStatus.Cancelled,
- ];
-
private readonly AssistantsClient _client = client;
private readonly string _threadId = threadId;
- private readonly Dictionary _agentTools = [];
- private readonly Dictionary _agentNames = []; // Cache agent names by their identifier for GetHistoryAsync()
///
protected override async Task ReceiveAsync(IReadOnlyList history, CancellationToken cancellationToken)
{
foreach (ChatMessageContent message in history)
{
- if (string.IsNullOrWhiteSpace(message.Content))
- {
- continue;
- }
-
- await this._client.CreateMessageAsync(
- this._threadId,
- message.Role.ToMessageRole(),
- message.Content,
- cancellationToken: cancellationToken).ConfigureAwait(false);
+ await AssistantThreadActions.CreateMessageAsync(this._client, this._threadId, message, cancellationToken).ConfigureAwait(false);
}
}
///
- protected override async IAsyncEnumerable InvokeAsync(
+ protected override IAsyncEnumerable InvokeAsync(
OpenAIAssistantAgent agent,
- [EnumeratorCancellation] CancellationToken cancellationToken)
+ CancellationToken cancellationToken)
{
- if (agent.IsDeleted)
- {
- throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}.");
- }
-
- if (!this._agentTools.TryGetValue(agent.Id, out ToolDefinition[]? tools))
- {
- tools = [.. agent.Tools, .. agent.Kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name, FunctionDelimiter)))];
- this._agentTools.Add(agent.Id, tools);
- }
-
- if (!this._agentNames.ContainsKey(agent.Id) && !string.IsNullOrWhiteSpace(agent.Name))
- {
- this._agentNames.Add(agent.Id, agent.Name);
- }
-
- this.Logger.LogDebug("[{MethodName}] Creating run for agent/thrad: {AgentId}/{ThreadId}", nameof(InvokeAsync), agent.Id, this._threadId);
-
- CreateRunOptions options =
- new(agent.Id)
- {
- OverrideInstructions = agent.Instructions,
- OverrideTools = tools,
- };
-
- // Create run
- ThreadRun run = await this._client.CreateRunAsync(this._threadId, options, cancellationToken).ConfigureAwait(false);
-
- this.Logger.LogInformation("[{MethodName}] Created run: {RunId}", nameof(InvokeAsync), run.Id);
-
- // Evaluate status and process steps and messages, as encountered.
- HashSet processedStepIds = [];
- Dictionary functionSteps = [];
-
- do
- {
- // Poll run and steps until actionable
- PageableList steps = await PollRunStatusAsync().ConfigureAwait(false);
-
- // Is in terminal state?
- if (s_terminalStatuses.Contains(run.Status))
- {
- throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}");
- }
-
- // Is tool action required?
- if (run.Status == RunStatus.RequiresAction)
- {
- this.Logger.LogDebug("[{MethodName}] Processing run steps: {RunId}", nameof(InvokeAsync), run.Id);
-
- // Execute functions in parallel and post results at once.
- FunctionCallContent[] activeFunctionSteps = steps.Data.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
- if (activeFunctionSteps.Length > 0)
- {
- // Emit function-call content
- yield return GenerateFunctionCallContent(agent.GetName(), activeFunctionSteps);
-
- // Invoke functions for each tool-step
- IEnumerable> functionResultTasks = ExecuteFunctionSteps(agent, activeFunctionSteps, cancellationToken);
-
- // Block for function results
- FunctionResultContent[] functionResults = await Task.WhenAll(functionResultTasks).ConfigureAwait(false);
-
- // Process tool output
- ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
-
- await this._client.SubmitToolOutputsToRunAsync(run, toolOutputs, cancellationToken).ConfigureAwait(false);
- }
-
- if (this.Logger.IsEnabled(LogLevel.Information)) // Avoid boxing if not enabled
- {
- this.Logger.LogInformation("[{MethodName}] Processed #{MessageCount} run steps: {RunId}", nameof(InvokeAsync), activeFunctionSteps.Length, run.Id);
- }
- }
-
- // Enumerate completed messages
- this.Logger.LogDebug("[{MethodName}] Processing run messages: {RunId}", nameof(InvokeAsync), run.Id);
-
- IEnumerable completedStepsToProcess =
- steps
- .Where(s => s.CompletedAt.HasValue && !processedStepIds.Contains(s.Id))
- .OrderBy(s => s.CreatedAt);
-
- int messageCount = 0;
- foreach (RunStep completedStep in completedStepsToProcess)
- {
- if (completedStep.Type.Equals(RunStepType.ToolCalls))
- {
- RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)completedStep.StepDetails;
-
- foreach (RunStepToolCall toolCall in toolCallDetails.ToolCalls)
- {
- ChatMessageContent? content = null;
-
- // Process code-interpreter content
- if (toolCall is RunStepCodeInterpreterToolCall toolCodeInterpreter)
- {
- content = GenerateCodeInterpreterContent(agent.GetName(), toolCodeInterpreter);
- }
- // Process function result content
- else if (toolCall is RunStepFunctionToolCall toolFunction)
- {
- FunctionCallContent functionStep = functionSteps[toolFunction.Id]; // Function step always captured on invocation
- content = GenerateFunctionResultContent(agent.GetName(), functionStep, toolFunction.Output);
- }
+ agent.ThrowIfDeleted();
- if (content is not null)
- {
- ++messageCount;
-
- yield return content;
- }
- }
- }
- else if (completedStep.Type.Equals(RunStepType.MessageCreation))
- {
- RunStepMessageCreationDetails messageCreationDetails = (RunStepMessageCreationDetails)completedStep.StepDetails;
-
- // Retrieve the message
- ThreadMessage? message = await this.RetrieveMessageAsync(messageCreationDetails, cancellationToken).ConfigureAwait(false);
-
- if (message is not null)
- {
- AuthorRole role = new(message.Role.ToString());
-
- foreach (MessageContent itemContent in message.ContentItems)
- {
- ChatMessageContent? content = null;
-
- // Process text content
- if (itemContent is MessageTextContent contentMessage)
- {
- content = GenerateTextMessageContent(agent.GetName(), role, contentMessage);
- }
- // Process image content
- else if (itemContent is MessageImageFileContent contentImage)
- {
- content = GenerateImageFileContent(agent.GetName(), role, contentImage);
- }
-
- if (content is not null)
- {
- ++messageCount;
-
- yield return content;
- }
- }
- }
- }
-
- processedStepIds.Add(completedStep.Id);
- }
-
- if (this.Logger.IsEnabled(LogLevel.Information)) // Avoid boxing if not enabled
- {
- this.Logger.LogInformation("[{MethodName}] Processed #{MessageCount} run messages: {RunId}", nameof(InvokeAsync), messageCount, run.Id);
- }
- }
- while (RunStatus.Completed != run.Status);
-
- this.Logger.LogInformation("[{MethodName}] Completed run: {RunId}", nameof(InvokeAsync), run.Id);
-
- // Local function to assist in run polling (participates in method closure).
- async Task> PollRunStatusAsync()
- {
- this.Logger.LogInformation("[{MethodName}] Polling run status: {RunId}", nameof(PollRunStatusAsync), run.Id);
-
- int count = 0;
-
- do
- {
- // Reduce polling frequency after a couple attempts
- await Task.Delay(count >= 2 ? pollingConfiguration.RunPollingInterval : pollingConfiguration.RunPollingBackoff, cancellationToken).ConfigureAwait(false);
- ++count;
-
-#pragma warning disable CA1031 // Do not catch general exception types
- try
- {
- run = await this._client.GetRunAsync(this._threadId, run.Id, cancellationToken).ConfigureAwait(false);
- }
- catch
- {
- // Retry anyway..
- }
-#pragma warning restore CA1031 // Do not catch general exception types
- }
- while (s_pollingStatuses.Contains(run.Status));
-
- this.Logger.LogInformation("[{MethodName}] Run status is {RunStatus}: {RunId}", nameof(PollRunStatusAsync), run.Status, run.Id);
-
- return await this._client.GetRunStepsAsync(run, cancellationToken: cancellationToken).ConfigureAwait(false);
- }
-
- // Local function to capture kernel function state for further processing (participates in method closure).
- IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, RunStep step)
- {
- if (step.Status == RunStepStatus.InProgress && step.StepDetails is RunStepToolCallDetails callDetails)
- {
- foreach (RunStepFunctionToolCall toolCall in callDetails.ToolCalls.OfType())
- {
- var nameParts = FunctionName.Parse(toolCall.Name, FunctionDelimiter);
-
- KernelArguments functionArguments = [];
- if (!string.IsNullOrWhiteSpace(toolCall.Arguments))
- {
- Dictionary arguments = JsonSerializer.Deserialize>(toolCall.Arguments)!;
- foreach (var argumentKvp in arguments)
- {
- functionArguments[argumentKvp.Key] = argumentKvp.Value.ToString();
- }
- }
-
- var content = new FunctionCallContent(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments);
-
- functionSteps.Add(toolCall.Id, content);
-
- yield return content;
- }
- }
- }
+ return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, pollingConfiguration, this.Logger, cancellationToken);
}
///
- protected override async IAsyncEnumerable GetHistoryAsync([EnumeratorCancellation] CancellationToken cancellationToken)
- {
- PageableList messages;
-
- string? lastId = null;
- do
- {
- messages = await this._client.GetMessagesAsync(this._threadId, limit: 100, ListSortOrder.Descending, after: lastId, null, cancellationToken).ConfigureAwait(false);
- foreach (ThreadMessage message in messages)
- {
- AuthorRole role = new(message.Role.ToString());
-
- string? assistantName = null;
- if (!string.IsNullOrWhiteSpace(message.AssistantId) &&
- !this._agentNames.TryGetValue(message.AssistantId, out assistantName))
- {
- Assistant assistant = await this._client.GetAssistantAsync(message.AssistantId, cancellationToken).ConfigureAwait(false);
- if (!string.IsNullOrWhiteSpace(assistant.Name))
- {
- this._agentNames.Add(assistant.Id, assistant.Name);
- }
- }
-
- assistantName ??= message.AssistantId;
-
- foreach (MessageContent item in message.ContentItems)
- {
- ChatMessageContent? content = null;
-
- if (item is MessageTextContent contentMessage)
- {
- content = GenerateTextMessageContent(assistantName, role, contentMessage);
- }
- else if (item is MessageImageFileContent contentImage)
- {
- content = GenerateImageFileContent(assistantName, role, contentImage);
- }
-
- if (content is not null)
- {
- yield return content;
- }
- }
-
- lastId = message.Id;
- }
- }
- while (messages.HasMore);
- }
-
- private static AnnotationContent GenerateAnnotationContent(MessageTextAnnotation annotation)
- {
- string? fileId = null;
- if (annotation is MessageTextFileCitationAnnotation citationAnnotation)
- {
- fileId = citationAnnotation.FileId;
- }
- else if (annotation is MessageTextFilePathAnnotation pathAnnotation)
- {
- fileId = pathAnnotation.FileId;
- }
-
- return
- new()
- {
- Quote = annotation.Text,
- StartIndex = annotation.StartIndex,
- EndIndex = annotation.EndIndex,
- FileId = fileId,
- };
- }
-
- private static ChatMessageContent GenerateImageFileContent(string agentName, AuthorRole role, MessageImageFileContent contentImage)
- {
- return
- new ChatMessageContent(
- role,
- [
- new FileReferenceContent(contentImage.FileId)
- ])
- {
- AuthorName = agentName,
- };
- }
-
- private static ChatMessageContent? GenerateTextMessageContent(string agentName, AuthorRole role, MessageTextContent contentMessage)
- {
- ChatMessageContent? messageContent = null;
-
- string textContent = contentMessage.Text.Trim();
-
- if (!string.IsNullOrWhiteSpace(textContent))
- {
- messageContent =
- new(role, textContent)
- {
- AuthorName = agentName
- };
-
- foreach (MessageTextAnnotation annotation in contentMessage.Annotations)
- {
- messageContent.Items.Add(GenerateAnnotationContent(annotation));
- }
- }
-
- return messageContent;
- }
-
- private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, RunStepCodeInterpreterToolCall contentCodeInterpreter)
- {
- return
- new ChatMessageContent(
- AuthorRole.Tool,
- [
- new TextContent(contentCodeInterpreter.Input)
- ])
- {
- AuthorName = agentName,
- };
- }
-
- private static ChatMessageContent GenerateFunctionCallContent(string agentName, FunctionCallContent[] functionSteps)
- {
- ChatMessageContent functionCallContent = new(AuthorRole.Tool, content: null)
- {
- AuthorName = agentName
- };
-
- functionCallContent.Items.AddRange(functionSteps);
-
- return functionCallContent;
- }
-
- private static ChatMessageContent GenerateFunctionResultContent(string agentName, FunctionCallContent functionStep, string result)
- {
- ChatMessageContent functionCallContent = new(AuthorRole.Tool, content: null)
- {
- AuthorName = agentName
- };
-
- functionCallContent.Items.Add(
- new FunctionResultContent(
- functionStep.FunctionName,
- functionStep.PluginName,
- functionStep.Id,
- result));
-
- return functionCallContent;
- }
-
- private static Task[] ExecuteFunctionSteps(OpenAIAssistantAgent agent, FunctionCallContent[] functionSteps, CancellationToken cancellationToken)
- {
- Task[] functionTasks = new Task[functionSteps.Length];
-
- for (int index = 0; index < functionSteps.Length; ++index)
- {
- functionTasks[index] = functionSteps[index].InvokeAsync(agent.Kernel, cancellationToken);
- }
-
- return functionTasks;
- }
-
- private static ToolOutput[] GenerateToolOutputs(FunctionResultContent[] functionResults)
+ protected override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken)
{
- ToolOutput[] toolOutputs = new ToolOutput[functionResults.Length];
-
- for (int index = 0; index < functionResults.Length; ++index)
- {
- FunctionResultContent functionResult = functionResults[index];
-
- object resultValue = (functionResult.Result as FunctionResult)?.GetValue
/// Chat history to be used for the request.
/// Execution settings to be used for the request.
- /// TexGenerationtRequest object.
- internal static ChatCompletionRequest FromChatHistoryAndExecutionSettings(ChatHistory chatHistory, HuggingFacePromptExecutionSettings executionSettings)
+ /// Model id to use if value in prompt execution settings is not set.
+ /// TextGenerationRequest object.
+ internal static ChatCompletionRequest FromChatHistoryAndExecutionSettings(ChatHistory chatHistory, HuggingFacePromptExecutionSettings executionSettings, string modelId)
{
return new ChatCompletionRequest
{
@@ -118,7 +119,7 @@ internal static ChatCompletionRequest FromChatHistoryAndExecutionSettings(ChatHi
Temperature = executionSettings.Temperature,
Stop = executionSettings.Stop,
MaxTokens = executionSettings.MaxTokens,
- Model = executionSettings.ModelId ?? TextGenerationInferenceDefaultModel,
+ Model = executionSettings.ModelId ?? modelId ?? TextGenerationInferenceDefaultModel,
TopP = executionSettings.TopP,
TopLogProbs = executionSettings.TopLogProbs
};
diff --git a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryStore.cs
index dcccc7983b91..359de4d57a5e 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryStore.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Kusto/KustoMemoryStore.cs
@@ -358,7 +358,7 @@ protected virtual void Dispose(bool disposing)
/// Kusto table name.
/// Boolean flag that indicates if table name normalization is needed.
private static string GetTableName(string collectionName, bool normalized = true)
- => normalized ? CslSyntaxGenerator.NormalizeTableName(collectionName) : collectionName;
+ => normalized ? CslSyntaxGenerator.NormalizeName(collectionName) : collectionName;
///
/// Converts Kusto table name to collection name.
diff --git a/dotnet/src/Connectors/Connectors.Memory.Milvus/MilvusMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Milvus/MilvusMemoryStore.cs
index 38d10778a723..7bdd2f03db94 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Milvus/MilvusMemoryStore.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Milvus/MilvusMemoryStore.cs
@@ -446,7 +446,7 @@ public Task RemoveBatchAsync(string collectionName, IEnumerable keys, Ca
MilvusCollection collection = this.Client.GetCollection(collectionName);
SearchResults results = await collection
- .SearchAsync(EmbeddingFieldName, [embedding], SimilarityMetricType.Ip, limit, this._searchParameters, cancellationToken)
+ .SearchAsync(EmbeddingFieldName, [embedding], this._metricType, limit, this._searchParameters, cancellationToken)
.ConfigureAwait(false);
IReadOnlyList ids = results.Ids.StringIds!;
diff --git a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj
index da803a71b52a..d9037605f6e5 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj
+++ b/dotnet/src/Connectors/Connectors.Memory.Qdrant/Connectors.Memory.Qdrant.csproj
@@ -22,6 +22,10 @@
+
+
+
+
diff --git a/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj b/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj
index 878cc229aeaf..52f2ea6f159c 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj
+++ b/dotnet/src/Connectors/Connectors.Memory.Redis/Connectors.Memory.Redis.csproj
@@ -1,4 +1,4 @@
-
+
@@ -23,6 +23,10 @@
+
+
+
+
diff --git a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs
index 222381814b4a..a52716e909b2 100644
--- a/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.SqlServer/SqlServerClient.cs
@@ -33,10 +33,23 @@ public SqlServerClient(SqlConnection connection, string schema)
this._schema = schema;
}
+ private async Task HasJsonNativeTypeAsync(CancellationToken cancellationToken = default)
+ {
+ using (await this.OpenConnectionAsync(cancellationToken).ConfigureAwait(false))
+ {
+ using var cmd = this._connection.CreateCommand();
+ cmd.CommandText = "select [name] from sys.types where system_type_id = 244 and user_type_id = 244";
+ var typeName = (string)await cmd.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
+ return string.Equals(typeName, "json", StringComparison.OrdinalIgnoreCase);
+ }
+ }
+
///
public async Task CreateTableAsync(string tableName, CancellationToken cancellationToken = default)
{
var fullTableName = this.GetSanitizedFullTableName(tableName);
+ var metadataType = await this.HasJsonNativeTypeAsync(cancellationToken).ConfigureAwait(false) ? "json" : "nvarchar(max)";
+
using (await this.OpenConnectionAsync(cancellationToken).ConfigureAwait(false))
{
using var cmd = this._connection.CreateCommand();
@@ -44,7 +57,7 @@ public async Task CreateTableAsync(string tableName, CancellationToken cancellat
IF OBJECT_ID(N'{fullTableName}', N'U') IS NULL
CREATE TABLE {fullTableName} (
[key] nvarchar(255) collate latin1_general_bin2 not null,
- [metadata] nvarchar(max) not null,
+ [metadata] {metadataType} not null,
[embedding] varbinary(8000),
[timestamp] datetimeoffset,
PRIMARY KEY NONCLUSTERED ([key]),
@@ -138,9 +151,9 @@ WHEN NOT MATCHED THEN
///
public async IAsyncEnumerable ReadBatchAsync(string tableName, IEnumerable keys, bool withEmbeddings = false, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
- var queryColumns = withEmbeddings
- ? "[key], [metadata], [timestamp], VECTOR_TO_JSON_ARRAY([embedding]) AS [embedding]"
- : "[key], [metadata], [timestamp]";
+ var queryColumns = "[key], [metadata], [timestamp]" +
+ (withEmbeddings ? ", VECTOR_TO_JSON_ARRAY([embedding]) AS [embedding]" : string.Empty);
+
var fullTableName = this.GetSanitizedFullTableName(tableName);
var keysList = keys.ToList();
var keysParams = string.Join(", ", keysList.Select((_, i) => $"@k{i}"));
@@ -189,9 +202,8 @@ WHERE [key] IN ({keysParams})
///
public async IAsyncEnumerable<(SqlServerMemoryEntry, double)> GetNearestMatchesAsync(string tableName, ReadOnlyMemory embedding, int limit, double minRelevanceScore = 0, bool withEmbeddings = false, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
- var queryColumns = withEmbeddings
- ? "[key], [metadata], [timestamp], 1 - VECTOR_DISTANCE('cosine', [embedding], JSON_ARRAY_TO_VECTOR(@e)) AS [cosine_similarity], VECTOR_TO_JSON_ARRAY([embedding]) AS [embedding]"
- : "[key], [metadata], [timestamp], 1 - VECTOR_DISTANCE('cosine', [embedding], JSON_ARRAY_TO_VECTOR(@e)) AS [cosine_similarity]";
+ var queryColumns = "[key], [metadata], [timestamp], 1 - VECTOR_DISTANCE('cosine', [embedding], JSON_ARRAY_TO_VECTOR(@e)) AS [cosine_similarity]" +
+ (withEmbeddings ? ", VECTOR_TO_JSON_ARRAY([embedding]) AS [embedding]" : string.Empty);
var fullTableName = this.GetSanitizedFullTableName(tableName);
using (await this.OpenConnectionAsync(cancellationToken).ConfigureAwait(false))
{
@@ -221,6 +233,7 @@ ORDER BY [cosine_similarity] DESC
private string GetSanitizedFullTableName(string tableName) => $"{DelimitIdentifier(this._schema)}.{DelimitIdentifier(tableName)}";
private string SerializeEmbedding(ReadOnlyMemory embedding) => JsonSerializer.Serialize(embedding);
+
private ReadOnlyMemory DeserializeEmbedding(string embedding) => JsonSerializer.Deserialize>(embedding);
private SqlServerMemoryEntry ReadEntry(SqlDataReader reader, bool hasEmbedding)
@@ -247,6 +260,7 @@ private async Task OpenConnectionAsync(CancellationToken cancellati
}
private static string DelimitIdentifier(string identifier) => $"[{EscapeIdentifier(identifier)}]";
+
private static string EscapeIdentifier(string identifier) => identifier.Replace("]", "]]");
private readonly struct Closer(SqlServerClient client, bool shouldClose) : IDisposable
diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs
index aee0735507c5..581a21afc52a 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs
@@ -168,6 +168,27 @@ DELETE FROM {TableName}
return cmd.ExecuteNonQueryAsync(cancellationToken);
}
+ public Task DeleteBatchAsync(SqliteConnection conn, string collectionName, string[] keys, CancellationToken cancellationToken = default)
+ {
+ using SqliteCommand cmd = conn.CreateCommand();
+ var keyParameters = keys.Select((key, index) => $"@key{index}");
+ var parameters = string.Join(", ", keyParameters);
+
+#pragma warning disable CA2100 // Review SQL queries for security vulnerabilities
+ cmd.CommandText = $@"
+ DELETE FROM {TableName}
+ WHERE collection=@collection
+ AND key IN ({parameters})";
+#pragma warning restore CA2100 // Review SQL queries for security vulnerabilities
+
+ cmd.Parameters.Add(new SqliteParameter("@collection", collectionName));
+ for (int i = 0; i < keys.Length; i++)
+ {
+ cmd.Parameters.Add(new SqliteParameter($"@key{i}", keys[i]));
+ }
+ return cmd.ExecuteNonQueryAsync(cancellationToken);
+ }
+
public Task DeleteEmptyAsync(SqliteConnection conn, string collectionName, CancellationToken cancellationToken = default)
{
using SqliteCommand cmd = conn.CreateCommand();
diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs
index 1dbe176146ce..3891df9c4de9 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs
@@ -113,7 +113,7 @@ public async Task RemoveAsync(string collectionName, string key, CancellationTok
///
public async Task RemoveBatchAsync(string collectionName, IEnumerable keys, CancellationToken cancellationToken = default)
{
- await Task.WhenAll(keys.Select(k => this._dbConnector.DeleteAsync(this._dbConnection, collectionName, k, cancellationToken))).ConfigureAwait(false);
+ await this._dbConnector.DeleteBatchAsync(this._dbConnection, collectionName, keys.ToArray(), cancellationToken).ConfigureAwait(false);
}
///
@@ -218,8 +218,7 @@ private SqliteMemoryStore(string filename)
private static DateTimeOffset? ParseTimestamp(string? str)
{
- if (!string.IsNullOrEmpty(str)
- && DateTimeOffset.TryParse(str, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out DateTimeOffset timestamp))
+ if (DateTimeOffset.TryParse(str, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out DateTimeOffset timestamp))
{
return timestamp;
}
diff --git a/dotnet/src/Connectors/Connectors.MistralAI/Client/MistralClient.cs b/dotnet/src/Connectors/Connectors.MistralAI/Client/MistralClient.cs
index cdd9c33f4789..fe3683d4f495 100644
--- a/dotnet/src/Connectors/Connectors.MistralAI/Client/MistralClient.cs
+++ b/dotnet/src/Connectors/Connectors.MistralAI/Client/MistralClient.cs
@@ -177,7 +177,8 @@ internal async Task> GetChatMessageContentsAsy
Arguments = functionArgs,
RequestSequenceIndex = requestIndex - 1,
FunctionSequenceIndex = toolCallIndex,
- FunctionCount = chatChoice.ToolCalls.Count
+ FunctionCount = chatChoice.ToolCalls.Count,
+ CancellationToken = cancellationToken
};
s_inflightAutoInvokes.Value++;
try
@@ -409,6 +410,7 @@ internal async IAsyncEnumerable GetStreamingChatMes
RequestSequenceIndex = requestIndex - 1,
FunctionSequenceIndex = toolCallIndex,
FunctionCount = toolCalls.Count,
+ CancellationToken = cancellationToken
};
s_inflightAutoInvokes.Value++;
try
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
index 8059077d8bf4..5d08b38da29c 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
@@ -511,7 +511,8 @@ internal async Task> GetChatMessageContentsAsy
Arguments = functionArgs,
RequestSequenceIndex = requestIndex - 1,
FunctionSequenceIndex = toolCallIndex,
- FunctionCount = result.ToolCalls.Count
+ FunctionCount = result.ToolCalls.Count,
+ CancellationToken = cancellationToken
};
s_inflightAutoInvokes.Value++;
@@ -693,7 +694,18 @@ internal async IAsyncEnumerable GetStreamingC
OpenAIFunctionToolCall.TrackStreamingToolingUpdate(update.ToolCallUpdate, ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex);
}
- var openAIStreamingChatMessageContent = new OpenAIStreamingChatMessageContent(update, update.ChoiceIndex ?? 0, this.DeploymentOrModelName, metadata) { AuthorName = streamedName };
+ AuthorRole? role = null;
+ if (streamedRole.HasValue)
+ {
+ role = new AuthorRole(streamedRole.Value.ToString());
+ }
+
+ OpenAIStreamingChatMessageContent openAIStreamingChatMessageContent =
+ new(update, update.ChoiceIndex ?? 0, this.DeploymentOrModelName, metadata)
+ {
+ AuthorName = streamedName,
+ Role = role,
+ };
if (update.ToolCallUpdate is StreamingFunctionToolCallUpdate functionCallUpdate)
{
@@ -798,7 +810,8 @@ internal async IAsyncEnumerable GetStreamingC
Arguments = functionArgs,
RequestSequenceIndex = requestIndex - 1,
FunctionSequenceIndex = toolCallIndex,
- FunctionCount = toolCalls.Length
+ FunctionCount = toolCalls.Length,
+ CancellationToken = cancellationToken
};
s_inflightAutoInvokes.Value++;
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml b/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml
index 33de31d4263b..3477ed220ea0 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml
+++ b/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml
@@ -1,6 +1,20 @@
+
+ CP0002
+ F:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose.Assistants
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0002
+ F:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose.FineTune
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+ CP0002M:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFileService.GetFileContent(System.String,System.Threading.CancellationToken)
@@ -29,6 +43,20 @@
lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dlltrue
+
+ CP0002
+ F:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose.Assistants
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0002
+ F:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose.FineTune
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+ CP0002M:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFileService.GetFileContent(System.String,System.Threading.CancellationToken)
@@ -57,4 +85,32 @@
lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dlltrue
+
+ CP0007
+ T:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0007
+ T:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0008
+ T:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0008
+ T:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
\ No newline at end of file
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs b/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs
index a01b2d08fa8d..8d87720fa89f 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs
@@ -1,22 +1,99 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Diagnostics.CodeAnalysis;
namespace Microsoft.SemanticKernel.Connectors.OpenAI;
///
-/// Defines the purpose associated with the uploaded file.
+/// Defines the purpose associated with the uploaded file:
+/// https://platform.openai.com/docs/api-reference/files/object#files/object-purpose
///
[Experimental("SKEXP0010")]
-public enum OpenAIFilePurpose
+public readonly struct OpenAIFilePurpose : IEquatable
{
///
- /// File to be used by assistants for model processing.
+ /// File to be used by assistants as input.
///
- Assistants,
+ public static OpenAIFilePurpose Assistants { get; } = new("assistants");
///
- /// File to be used by fine-tuning jobs.
+ /// File produced as assistants output.
///
- FineTune,
+ public static OpenAIFilePurpose AssistantsOutput { get; } = new("assistants_output");
+
+ ///
+ /// Files uploaded as a batch of API requests.
+ ///
+ public static OpenAIFilePurpose Batch { get; } = new("batch");
+
+ ///
+ /// File produced as the result of a file included in a batch request.
+ ///
+ public static OpenAIFilePurpose BatchOutput { get; } = new("batch_output");
+
+ ///
+ /// File to be used as input to fine-tune a model.
+ ///
+ public static OpenAIFilePurpose FineTune { get; } = new("fine-tune");
+
+ ///
+ /// File produced as result of fine-tuning a model.
+ ///
+ public static OpenAIFilePurpose FineTuneResults { get; } = new("fine-tune-results");
+
+ ///
+ /// File to be used for Assistants image file inputs.
+ ///
+ public static OpenAIFilePurpose Vision { get; } = new("vision");
+
+ ///
+ /// Gets the label associated with this .
+ ///
+ public string Label { get; }
+
+ ///
+ /// Creates a new instance with the provided label.
+ ///
+ /// The label to associate with this .
+ public OpenAIFilePurpose(string label)
+ {
+ Verify.NotNullOrWhiteSpace(label, nameof(label));
+ this.Label = label!;
+ }
+
+ ///
+ /// Returns a value indicating whether two instances are equivalent, as determined by a
+ /// case-insensitive comparison of their labels.
+ ///
+ /// the first instance to compare
+ /// the second instance to compare
+ /// true if left and right are both null or have equivalent labels; false otherwise
+ public static bool operator ==(OpenAIFilePurpose left, OpenAIFilePurpose right)
+ => left.Equals(right);
+
+ ///
+ /// Returns a value indicating whether two instances are not equivalent, as determined by a
+ /// case-insensitive comparison of their labels.
+ ///
+ /// the first instance to compare
+ /// the second instance to compare
+ /// false if left and right are both null or have equivalent labels; true otherwise
+ public static bool operator !=(OpenAIFilePurpose left, OpenAIFilePurpose right)
+ => !(left == right);
+
+ ///
+ public override bool Equals([NotNullWhen(true)] object? obj)
+ => obj is OpenAIFilePurpose otherPurpose && this == otherPurpose;
+
+ ///
+ public bool Equals(OpenAIFilePurpose other)
+ => string.Equals(this.Label, other.Label, StringComparison.OrdinalIgnoreCase);
+
+ ///
+ public override int GetHashCode()
+ => StringComparer.OrdinalIgnoreCase.GetHashCode(this.Label);
+
+ ///
+ public override string ToString() => this.Label;
}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs
index cc61734f44c8..690954448eea 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs
@@ -112,7 +112,8 @@ public async Task DeleteFileAsync(string id, CancellationToken cancellationToken
public async Task GetFileContentAsync(string id, CancellationToken cancellationToken = default)
{
Verify.NotNull(id, nameof(id));
- var (stream, mimetype) = await this.StreamGetRequestAsync($"{this._serviceUri}/{id}/content", cancellationToken).ConfigureAwait(false);
+ var contentUri = $"{this._serviceUri}/{id}/content";
+ var (stream, mimetype) = await this.StreamGetRequestAsync(contentUri, cancellationToken).ConfigureAwait(false);
using (stream)
{
@@ -123,7 +124,12 @@ public async Task GetFileContentAsync(string id, CancellationToke
#else
await stream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
#endif
- return new BinaryContent(memoryStream.ToArray(), mimetype);
+ return
+ new(memoryStream.ToArray(), mimetype)
+ {
+ Metadata = new Dictionary() { { "id", id } },
+ Uri = new Uri(contentUri),
+ };
}
}
@@ -147,9 +153,19 @@ public async Task GetFileAsync(string id, CancellationToken
///
/// The to monitor for cancellation requests. The default is .
/// The metadata of all uploaded files.
- public async Task> GetFilesAsync(CancellationToken cancellationToken = default)
+ public Task> GetFilesAsync(CancellationToken cancellationToken = default)
+ => this.GetFilesAsync(null, cancellationToken);
+
+ ///
+ /// Retrieve metadata for previously uploaded files
+ ///
+ /// The purpose of the files by which to filter.
+ /// The to monitor for cancellation requests. The default is .
+ /// The metadata of all uploaded files.
+ public async Task> GetFilesAsync(OpenAIFilePurpose? filePurpose, CancellationToken cancellationToken = default)
{
- var result = await this.ExecuteGetRequestAsync(this._serviceUri.ToString(), cancellationToken).ConfigureAwait(false);
+ var serviceUri = filePurpose.HasValue && !string.IsNullOrEmpty(filePurpose.Value.Label) ? $"{this._serviceUri}?purpose={filePurpose}" : this._serviceUri.ToString();
+ var result = await this.ExecuteGetRequestAsync(serviceUri, cancellationToken).ConfigureAwait(false);
return result.Data.Select(this.ConvertFileReference).ToArray();
}
@@ -167,7 +183,7 @@ public async Task UploadContentAsync(BinaryContent fileCont
Verify.NotNull(fileContent.Data, nameof(fileContent.Data));
using var formData = new MultipartFormDataContent();
- using var contentPurpose = new StringContent(this.ConvertPurpose(settings.Purpose));
+ using var contentPurpose = new StringContent(settings.Purpose.Label);
using var contentFile = new ByteArrayContent(fileContent.Data.Value.ToArray());
formData.Add(contentPurpose, "purpose");
formData.Add(contentFile, "file", settings.FileName);
@@ -281,26 +297,10 @@ private OpenAIFileReference ConvertFileReference(FileInfo result)
FileName = result.FileName,
CreatedTimestamp = DateTimeOffset.FromUnixTimeSeconds(result.CreatedAt).UtcDateTime,
SizeInBytes = result.Bytes ?? 0,
- Purpose = this.ConvertPurpose(result.Purpose),
+ Purpose = new(result.Purpose),
};
}
- private OpenAIFilePurpose ConvertPurpose(string purpose) =>
- purpose.ToUpperInvariant() switch
- {
- "ASSISTANTS" => OpenAIFilePurpose.Assistants,
- "FINE-TUNE" => OpenAIFilePurpose.FineTune,
- _ => throw new KernelException($"Unknown {nameof(OpenAIFilePurpose)}: {purpose}."),
- };
-
- private string ConvertPurpose(OpenAIFilePurpose purpose) =>
- purpose switch
- {
- OpenAIFilePurpose.Assistants => "assistants",
- OpenAIFilePurpose.FineTune => "fine-tune",
- _ => throw new KernelException($"Unknown {nameof(OpenAIFilePurpose)}: {purpose}."),
- };
-
private sealed class FileInfoList
{
[JsonPropertyName("data")]
diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/.editorconfig
new file mode 100644
index 000000000000..394eef685f21
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/.editorconfig
@@ -0,0 +1,6 @@
+# Suppressing errors for Test projects under dotnet folder
+[*.cs]
+dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task
+dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave
+dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member
+dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations
diff --git a/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj
new file mode 100644
index 000000000000..87782f3d2e8f
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/Connectors.Qdrant.UnitTests.csproj
@@ -0,0 +1,38 @@
+
+
+
+ Microsoft.SemanticKernel.Connectors.Qdrant.UnitTests
+ Microsoft.SemanticKernel.Connectors.Qdrant.UnitTests
+ net8.0
+ true
+ enable
+ disable
+ false
+ $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryBuilderExtensionsTests.cs
similarity index 97%
rename from dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryBuilderExtensionsTests.cs
rename to dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryBuilderExtensionsTests.cs
index 8d43f12d8983..897a09087f09 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryBuilderExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryBuilderExtensionsTests.cs
@@ -11,7 +11,7 @@
using Moq;
using Xunit;
-namespace SemanticKernel.Connectors.UnitTests.Qdrant;
+namespace SemanticKernel.Connectors.Qdrant.UnitTests;
public sealed class QdrantMemoryBuilderExtensionsTests : IDisposable
{
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests.cs
similarity index 99%
rename from dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs
rename to dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests.cs
index 499164c31c68..6ae498561065 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests.cs
+++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests.cs
@@ -12,7 +12,7 @@
using Moq;
using Xunit;
-namespace SemanticKernel.Connectors.UnitTests.Qdrant;
+namespace SemanticKernel.Connectors.Qdrant.UnitTests;
///
/// Tests for collection and upsert operations.
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests2.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests2.cs
similarity index 99%
rename from dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests2.cs
rename to dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests2.cs
index a7303f9e47a6..8af2061c5d3a 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests2.cs
+++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests2.cs
@@ -11,7 +11,7 @@
using Moq;
using Xunit;
-namespace SemanticKernel.Connectors.UnitTests.Qdrant;
+namespace SemanticKernel.Connectors.Qdrant.UnitTests;
///
/// Tests for Get and Remove operations.
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests3.cs
similarity index 99%
rename from dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs
rename to dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests3.cs
index f1cff494ff4d..ad7d54e2d5bb 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantMemoryStoreTests3.cs
+++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantMemoryStoreTests3.cs
@@ -15,7 +15,7 @@
using Moq.Protected;
using Xunit;
-namespace SemanticKernel.Connectors.UnitTests.Qdrant;
+namespace SemanticKernel.Connectors.Qdrant.UnitTests;
///
/// Tests for Search operations.
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantVectorDbClientTests.cs b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorDbClientTests.cs
similarity index 97%
rename from dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantVectorDbClientTests.cs
rename to dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorDbClientTests.cs
index 2223f25e62ee..41a95178a588 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Qdrant/QdrantVectorDbClientTests.cs
+++ b/dotnet/src/Connectors/Connectors.Qdrant.UnitTests/QdrantVectorDbClientTests.cs
@@ -6,7 +6,7 @@
using Microsoft.SemanticKernel.Connectors.Qdrant;
using Xunit;
-namespace SemanticKernel.Connectors.UnitTests.Qdrant;
+namespace SemanticKernel.Connectors.Qdrant.UnitTests;
public sealed class QdrantVectorDbClientTests : IDisposable
{
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.Redis.UnitTests/.editorconfig
new file mode 100644
index 000000000000..394eef685f21
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/.editorconfig
@@ -0,0 +1,6 @@
+# Suppressing errors for Test projects under dotnet folder
+[*.cs]
+dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task
+dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave
+dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member
+dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations
diff --git a/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj
new file mode 100644
index 000000000000..c54e1a3b5136
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/Connectors.Redis.UnitTests.csproj
@@ -0,0 +1,37 @@
+
+
+
+ Microsoft.SemanticKernel.Connectors.Redis.UnitTests
+ Microsoft.SemanticKernel.Connectors.Redis.UnitTests
+ net8.0
+ true
+ enable
+ disable
+ false
+ $(NoWarn);CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0050
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Redis/RedisMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisMemoryStoreTests.cs
similarity index 99%
rename from dotnet/src/Connectors/Connectors.UnitTests/Memory/Redis/RedisMemoryStoreTests.cs
rename to dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisMemoryStoreTests.cs
index 53f41384171d..5c63e568a3a9 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Redis/RedisMemoryStoreTests.cs
+++ b/dotnet/src/Connectors/Connectors.Redis.UnitTests/RedisMemoryStoreTests.cs
@@ -15,7 +15,7 @@
using StackExchange.Redis;
using Xunit;
-namespace SemanticKernel.Connectors.UnitTests.Redis;
+namespace SemanticKernel.Connectors.Redis.UnitTests;
///
/// Unit tests of .
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj
index 455206f5ce04..a4b7bd6ace44 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj
+++ b/dotnet/src/Connectors/Connectors.UnitTests/Connectors.UnitTests.csproj
@@ -39,14 +39,11 @@
-
-
-
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs
index d8a2ec5c78cc..7cdec0210775 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/Memory/Kusto/KustoMemoryStoreTests.cs
@@ -25,6 +25,7 @@ public class KustoMemoryStoreTests
private const string DatabaseName = "FakeDb";
private readonly Mock _cslQueryProviderMock;
private readonly Mock _cslAdminProviderMock;
+ private readonly string _normalisedCollectionName = CslSyntaxGenerator.NormalizeName(CollectionName);
public KustoMemoryStoreTests()
{
@@ -145,7 +146,7 @@ public async Task ItCanUpsertAsync()
// Assert
this._cslAdminProviderMock.Verify(client => client.ExecuteControlCommandAsync(
DatabaseName,
- It.Is(s => s.StartsWith($".ingest inline into table {CollectionName}", StringComparison.Ordinal) && s.Contains(actualMemoryRecordKey, StringComparison.Ordinal)),
+ It.Is(s => s.StartsWith($".ingest inline into table {this._normalisedCollectionName}", StringComparison.Ordinal) && s.Contains(actualMemoryRecordKey, StringComparison.Ordinal)),
It.IsAny()), Times.Once());
Assert.Equal(expectedMemoryRecord.Key, actualMemoryRecordKey);
}
@@ -171,7 +172,7 @@ public async Task ItCanUpsertBatchAsyncAsync()
.Verify(client => client.ExecuteControlCommandAsync(
DatabaseName,
It.Is(s =>
- s.StartsWith($".ingest inline into table {CollectionName}", StringComparison.Ordinal) &&
+ s.StartsWith($".ingest inline into table {this._normalisedCollectionName}", StringComparison.Ordinal) &&
batchUpsertMemoryRecords.All(r => s.Contains(r.Key, StringComparison.Ordinal))),
It.IsAny()
), Times.Once());
@@ -306,7 +307,7 @@ public async Task ItCanRemoveAsync()
this._cslAdminProviderMock
.Verify(client => client.ExecuteControlCommandAsync(
DatabaseName,
- It.Is(s => s.Replace(" ", " ").StartsWith($".delete table {CollectionName}") && s.Contains(MemoryRecordKey)), // Replace double spaces with single space to account for the fact that the query is formatted with double spaces and to be future proof
+ It.Is(s => s.Replace(" ", " ").StartsWith($".delete table {this._normalisedCollectionName}") && s.Contains(MemoryRecordKey)), // Replace double spaces with single space to account for the fact that the query is formatted with double spaces and to be future proof
It.IsAny()
), Times.Once());
}
@@ -325,7 +326,7 @@ public async Task ItCanRemoveBatchAsync()
this._cslAdminProviderMock
.Verify(client => client.ExecuteControlCommandAsync(
DatabaseName,
- It.Is(s => s.Replace(" ", " ").StartsWith($".delete table {CollectionName}") && memoryRecordKeys.All(r => s.Contains(r, StringComparison.OrdinalIgnoreCase))),
+ It.Is(s => s.Replace(" ", " ").StartsWith($".delete table {this._normalisedCollectionName}") && memoryRecordKeys.All(r => s.Contains(r, StringComparison.OrdinalIgnoreCase))),
It.IsAny()
), Times.Once());
}
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs
index 1151ea41bc9b..f528edfe503d 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/FunctionCalling/AutoFunctionInvocationFilterTests.cs
@@ -5,6 +5,7 @@
using System.Linq;
using System.Net;
using System.Net.Http;
+using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
@@ -569,6 +570,53 @@ public async Task PostFilterCanTerminateOperationOnStreamingAsync()
Assert.Equal(AuthorRole.Tool, lastMessageContent.Role);
}
+ [Fact]
+ public async Task FilterContextHasCancellationTokenAsync()
+ {
+ // Arrange
+ using var cancellationTokenSource = new CancellationTokenSource();
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) =>
+ {
+ cancellationTokenSource.Cancel();
+ firstFunctionInvocations++;
+ return parameter;
+ }, "Function1");
+
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) =>
+ {
+ secondFunctionInvocations++;
+ return parameter;
+ }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ Assert.Equal(cancellationTokenSource.Token, context.CancellationToken);
+
+ await next(context);
+
+ context.CancellationToken.ThrowIfCancellationRequested();
+ });
+
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("filters_multiple_function_calls_test_response.json")) };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
+
+ this._messageHandlerStub.ResponsesToReturn = [response1, response2];
+
+ var arguments = new KernelArguments(new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions });
+
+ // Act & Assert
+ var exception = await Assert.ThrowsAsync(()
+ => kernel.InvokePromptAsync("Test prompt", arguments, cancellationToken: cancellationTokenSource.Token));
+
+ Assert.Equal(1, firstFunctionInvocations);
+ Assert.Equal(0, secondFunctionInvocations);
+ }
+
public void Dispose()
{
this._httpClient.Dispose();
diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs
index 1717106ba256..c5890d604d81 100644
--- a/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/OpenAI/KernelOpenAIPluginExtensions.cs
@@ -18,6 +18,7 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi;
///
/// Provides extension methods for importing plugins exposed through OpenAI's ChatGPT format.
///
+[Obsolete("This class is deprecated and will be removed in a future version.")]
public static class OpenAIPluginKernelExtensions
{
private static readonly JsonSerializerOptions s_jsonOptionsCache =
diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs
index 369e8e8694cd..b8d7d1015a3e 100644
--- a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticateRequestAsyncCallback.cs
@@ -1,5 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
@@ -14,4 +15,5 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi;
/// The used to authenticate.
/// The cancellation token.
/// A representing the asynchronous operation.
+[Obsolete("This delegate is deprecated and will be removed in a future version.")]
public delegate Task OpenAIAuthenticateRequestAsyncCallback(HttpRequestMessage request, string pluginName, OpenAIAuthenticationConfig openAIAuthConfig, CancellationToken cancellationToken = default);
diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs
index c4d1ff9caa09..5d01bc083f3a 100644
--- a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIAuthenticationConfig.cs
@@ -9,6 +9,7 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi;
///
/// Represents the authentication section for an OpenAI plugin.
///
+[Obsolete("This class is deprecated and will be removed in a future version.")]
public class OpenAIAuthenticationConfig
{
///
@@ -57,6 +58,7 @@ public class OpenAIAuthenticationConfig
///
/// Represents the type of authentication for an OpenAI plugin.
///
+[Obsolete("This enum is deprecated and will be removed in a future version.")]
public enum OpenAIAuthenticationType
{
///
@@ -83,6 +85,7 @@ public enum OpenAIAuthenticationType
///
/// Represents the type of authorization for an OpenAI plugin.
///
+[Obsolete("This enum is deprecated and will be removed in a future version.")]
public enum OpenAIAuthorizationType
{
///
diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs
index bc2084fb21fb..5f04bec5c039 100644
--- a/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/OpenAI/OpenAIFunctionExecutionParameters.cs
@@ -8,6 +8,7 @@ namespace Microsoft.SemanticKernel.Plugins.OpenApi;
///
/// OpenAI function execution parameters
///
+[Obsolete("This class is deprecated and will be removed in a future version.")]
public class OpenAIFunctionExecutionParameters : OpenApiFunctionExecutionParameters
{
///
diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs
index 36f7601dd02e..7c00e7ba375d 100644
--- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs
+++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/OpenAI/KernelOpenAIPluginExtensionsTests.cs
@@ -17,6 +17,7 @@
namespace SemanticKernel.Functions.UnitTests.OpenApi.OpenAI;
+[Obsolete("OpenAI plugins are deprecated and will be removed in a future version.")]
public sealed class KernelOpenAIPluginExtensionsTests : IDisposable
{
///
diff --git a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs
index 321ede0ff115..5732a3e4719a 100644
--- a/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Google/Gemini/GeminiChatCompletionTests.cs
@@ -64,6 +64,104 @@ public async Task ChatStreamingReturnsValidResponseAsync(ServiceType serviceType
this.Output.WriteLine(message);
}
+ [RetryTheory]
+ [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")]
+ [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")]
+ public async Task ChatGenerationOnlyAssistantMessagesReturnsValidResponseAsync(ServiceType serviceType)
+ {
+ // Arrange
+ var chatHistory = new ChatHistory();
+ chatHistory.AddAssistantMessage("I'm Brandon, I'm very thirsty");
+ chatHistory.AddAssistantMessage("Could you help me get some...");
+
+ var sut = this.GetChatService(serviceType);
+
+ // Act
+ var response = await sut.GetChatMessageContentAsync(chatHistory);
+
+ // Assert
+ Assert.NotNull(response.Content);
+ this.Output.WriteLine(response.Content);
+ string[] resultWords = ["drink", "water", "tea", "coffee", "juice", "soda"];
+ Assert.Contains(resultWords, word => response.Content.Contains(word, StringComparison.OrdinalIgnoreCase));
+ }
+
+ [RetryTheory]
+ [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")]
+ [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")]
+ public async Task ChatStreamingOnlyAssistantMessagesReturnsValidResponseAsync(ServiceType serviceType)
+ {
+ // Arrange
+ var chatHistory = new ChatHistory();
+ chatHistory.AddAssistantMessage("I'm Brandon, I'm very thirsty");
+ chatHistory.AddAssistantMessage("Could you help me get some...");
+
+ var sut = this.GetChatService(serviceType);
+
+ // Act
+ var response =
+ await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotEmpty(response);
+ Assert.True(response.Count > 1);
+ var message = string.Concat(response.Select(c => c.Content));
+ this.Output.WriteLine(message);
+ string[] resultWords = ["drink", "water", "tea", "coffee", "juice", "soda"];
+ Assert.Contains(resultWords, word => message.Contains(word, StringComparison.OrdinalIgnoreCase));
+ }
+
+ [RetryTheory]
+ [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")]
+ [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")]
+ public async Task ChatGenerationWithSystemMessagesAsync(ServiceType serviceType)
+ {
+ // Arrange
+ var chatHistory = new ChatHistory("You are helpful assistant. Your name is Roger.");
+ chatHistory.AddSystemMessage("You know ACDD equals 1520");
+ chatHistory.AddUserMessage("Hello, I'm Brandon, how are you?");
+ chatHistory.AddAssistantMessage("I'm doing well, thanks for asking.");
+ chatHistory.AddUserMessage("Tell me your name and the value of ACDD.");
+
+ var sut = this.GetChatService(serviceType);
+
+ // Act
+ var response = await sut.GetChatMessageContentAsync(chatHistory);
+
+ // Assert
+ Assert.NotNull(response.Content);
+ this.Output.WriteLine(response.Content);
+ Assert.Contains("1520", response.Content, StringComparison.OrdinalIgnoreCase);
+ Assert.Contains("Roger", response.Content, StringComparison.OrdinalIgnoreCase);
+ }
+
+ [RetryTheory]
+ [InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")]
+ [InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")]
+ public async Task ChatStreamingWithSystemMessagesAsync(ServiceType serviceType)
+ {
+ // Arrange
+ var chatHistory = new ChatHistory("You are helpful assistant. Your name is Roger.");
+ chatHistory.AddSystemMessage("You know ACDD equals 1520");
+ chatHistory.AddUserMessage("Hello, I'm Brandon, how are you?");
+ chatHistory.AddAssistantMessage("I'm doing well, thanks for asking.");
+ chatHistory.AddUserMessage("Tell me your name and the value of ACDD.");
+
+ var sut = this.GetChatService(serviceType);
+
+ // Act
+ var response =
+ await sut.GetStreamingChatMessageContentsAsync(chatHistory).ToListAsync();
+
+ // Assert
+ Assert.NotEmpty(response);
+ Assert.True(response.Count > 1);
+ var message = string.Concat(response.Select(c => c.Content));
+ this.Output.WriteLine(message);
+ Assert.Contains("1520", message, StringComparison.OrdinalIgnoreCase);
+ Assert.Contains("Roger", message, StringComparison.OrdinalIgnoreCase);
+ }
+
[RetryTheory]
[InlineData(ServiceType.GoogleAI, Skip = "This test is for manual verification.")]
[InlineData(ServiceType.VertexAI, Skip = "This test is for manual verification.")]
diff --git a/dotnet/src/IntegrationTests/Connectors/HuggingFace/ChatCompletion/HuggingFaceChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/HuggingFace/ChatCompletion/HuggingFaceChatCompletionTests.cs
new file mode 100644
index 000000000000..cca6f6703fcb
--- /dev/null
+++ b/dotnet/src/IntegrationTests/Connectors/HuggingFace/ChatCompletion/HuggingFaceChatCompletionTests.cs
@@ -0,0 +1,137 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Text;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.HuggingFace;
+using Xunit;
+
+namespace SemanticKernel.IntegrationTests.Connectors.HuggingFace.ChatCompletion;
+
+/// <summary>
+/// Integration tests for <see cref="HuggingFaceChatCompletionService"/>.
+/// </summary>
+/// <remarks>
+/// Instructions for setting up a Text Generation Inference (TGI) endpoint, see: https://huggingface.co/blog/tgi-messages-api
+/// </remarks>
+public sealed class HuggingFaceChatCompletionTests
+{
+ private const string Endpoint = "https://.endpoints.huggingface.cloud/v1/";
+ private const string Model = "tgi";
+
+ private readonly IConfigurationRoot _configuration;
+
+ public HuggingFaceChatCompletionTests()
+ {
+ // Load configuration
+ this._configuration = new ConfigurationBuilder()
+ .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true)
+ .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+ .AddEnvironmentVariables()
+ .AddUserSecrets<HuggingFaceChatCompletionTests>()
+ .Build();
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task GetChatMessageContentsAsync()
+ {
+ // Arrange
+ var chatHistory = new ChatHistory
+ {
+ new ChatMessageContent(AuthorRole.System, "Use C# 12 features."),
+ new ChatMessageContent(AuthorRole.User, "Write a C# Hello world?")
+ };
+ var huggingFaceRemote = new HuggingFaceChatCompletionService(Model, endpoint: new Uri(Endpoint), apiKey: this.GetApiKey());
+
+ // Act
+ var response = await huggingFaceRemote.GetChatMessageContentsAsync(chatHistory, new HuggingFacePromptExecutionSettings() { MaxNewTokens = 50 });
+
+ // Assert
+ Assert.NotNull(response);
+ Assert.Single(response);
+ Assert.True(response[0].Content?.Length > 0);
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task GetStreamingChatMessageContentsAsync()
+ {
+ // Arrange
+ var chatHistory = new ChatHistory
+ {
+ new ChatMessageContent(AuthorRole.System, "Use C# 12 features."),
+ new ChatMessageContent(AuthorRole.User, "Write a C# Hello world?")
+ };
+ var huggingFaceRemote = new HuggingFaceChatCompletionService(Model, endpoint: new Uri(Endpoint), apiKey: this.GetApiKey());
+
+ // Act
+ var response = new StringBuilder();
+ await foreach (var update in huggingFaceRemote.GetStreamingChatMessageContentsAsync(chatHistory, new HuggingFacePromptExecutionSettings() { MaxNewTokens = 50 }))
+ {
+ if (update.Content is { Length: > 0 })
+ {
+ response.Append(update.Content);
+ }
+ }
+
+ // Assert
+ Assert.NotNull(response);
+ Assert.True(response.Length > 0);
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task InvokeKernelFunctionAsync()
+ {
+ // Arrange
+ Kernel kernel = Kernel.CreateBuilder()
+ .AddHuggingFaceChatCompletion(Model, endpoint: new Uri(Endpoint), apiKey: this.GetApiKey())
+ .Build();
+
+ var kernelFunction = kernel.CreateFunctionFromPrompt("Write a C# Hello world", new HuggingFacePromptExecutionSettings
+ {
+ MaxNewTokens = 50,
+ });
+
+ // Act
+ var response = await kernel.InvokeAsync(kernelFunction);
+
+ // Assert
+ Assert.NotNull(response);
+ Assert.True(response.ToString().Length > 0);
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task InvokeKernelFunctionStreamingAsync()
+ {
+ // Arrange
+ Kernel kernel = Kernel.CreateBuilder()
+ .AddHuggingFaceChatCompletion(Model, endpoint: new Uri(Endpoint), apiKey: this.GetApiKey())
+ .Build();
+
+ var kernelFunction = kernel.CreateFunctionFromPrompt("Write a C# Hello world", new HuggingFacePromptExecutionSettings
+ {
+ MaxNewTokens = 50,
+ });
+
+ // Act
+ var response = new StringBuilder();
+ await foreach (var update in kernel.InvokeStreamingAsync(kernelFunction))
+ {
+ if (update.ToString() is { Length: > 0 })
+ {
+ response.Append(update.ToString());
+ }
+ }
+
+ // Assert
+ Assert.NotNull(response);
+ Assert.True(response.ToString().Length > 0);
+ }
+
+ private string GetApiKey()
+ {
+ return this._configuration.GetSection("HuggingFace:ApiKey").Get<string>()!;
+ }
+}
diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/Milvus/MilvusMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/Milvus/MilvusMemoryStoreTests.cs
index 0ed028eba747..5fba220a3ad4 100644
--- a/dotnet/src/IntegrationTests/Connectors/Memory/Milvus/MilvusMemoryStoreTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Memory/Milvus/MilvusMemoryStoreTests.cs
@@ -220,6 +220,45 @@ public async Task GetNearestMatchesAsync(bool withEmbeddings)
});
}
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task GetNearestMatchesWithMetricTypeAsync(bool withEmbeddings)
+ {
+ //Create collection with the default Ip metric
+ await this.Store.CreateCollectionAsync(CollectionName);
+ await this.InsertSampleDataAsync();
+ await this.Store.Client.FlushAsync([CollectionName]);
+
+ //Search with Ip metric, runs correctly
+ List<(MemoryRecord Record, double SimilarityScore)> ipResults =
+ this.Store.GetNearestMatchesAsync(CollectionName, new[] { 5f, 6f, 7f, 8f, 9f }, limit: 2, withEmbeddings: withEmbeddings).ToEnumerable().ToList();
+
+ Assert.All(ipResults, t => Assert.True(t.SimilarityScore > 0));
+
+ //Set the store to the Cosine metric without recreating the collection
+ this.Store = new(this._milvusFixture.Host, vectorSize: 5, port: this._milvusFixture.Port, metricType: SimilarityMetricType.Cosine, consistencyLevel: ConsistencyLevel.Strong);
+
+ //An exception will be thrown here; the exception message includes "metric type not match"
+ MilvusException milvusException = Assert.Throws<MilvusException>(() => this.Store.GetNearestMatchesAsync(CollectionName, new[] { 5f, 6f, 7f, 8f, 9f }, limit: 2, withEmbeddings: withEmbeddings).ToEnumerable().ToList());
+
+ Assert.NotNull(milvusException);
+
+ Assert.Contains("metric type not match", milvusException.Message);
+
+ //Recreate collection with Cosine metric
+ await this.Store.DeleteCollectionAsync(CollectionName);
+ await this.Store.CreateCollectionAsync(CollectionName);
+ await this.InsertSampleDataAsync();
+ await this.Store.Client.FlushAsync([CollectionName]);
+
+ //Search with Cosine metric, runs correctly
+ List<(MemoryRecord Record, double SimilarityScore)> cosineResults =
+ this.Store.GetNearestMatchesAsync(CollectionName, new[] { 5f, 6f, 7f, 8f, 9f }, limit: 2, withEmbeddings: withEmbeddings).ToEnumerable().ToList();
+
+ Assert.All(cosineResults, t => Assert.True(t.SimilarityScore > 0));
+ }
+
[Fact]
public async Task GetNearestMatchesWithMinRelevanceScoreAsync()
{
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs
index 03cd3429d4b0..675661b76d83 100644
--- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs
@@ -128,6 +128,11 @@ public async Task AzureOpenAIStreamingTestAsync(bool useChatModel, string prompt
// Act
await foreach (var content in target.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }))
{
+ if (content is StreamingChatMessageContent messageContent)
+ {
+ Assert.NotNull(messageContent.Role);
+ }
+
fullResult.Append(content);
}
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs
new file mode 100644
index 000000000000..30b0c3d1115b
--- /dev/null
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs
@@ -0,0 +1,156 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using SemanticKernel.IntegrationTests.TestSettings;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace SemanticKernel.IntegrationTests.Connectors.OpenAI;
+
+#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only.
+
+public sealed class OpenAIFileServiceTests(ITestOutputHelper output) : IDisposable
+{
+ private readonly IConfigurationRoot _configuration = new ConfigurationBuilder()
+ .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true)
+ .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+ .AddEnvironmentVariables()
+ .AddUserSecrets<OpenAIFileServiceTests>()
+ .Build();
+
+ [Theory(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
+ [InlineData("test_image_001.jpg", "image/jpeg")]
+ [InlineData("test_content.txt", "text/plain")]
+ public async Task OpenAIFileServiceLifecycleAsync(string fileName, string mimeType)
+ {
+ // Arrange
+ OpenAIFileService fileService = this.CreateOpenAIFileService();
+
+ // Act & Assert
+ await this.VerifyFileServiceLifecycleAsync(fileService, fileName, mimeType);
+ }
+
+ [Theory]
+ [InlineData("test_image_001.jpg", "image/jpeg")]
+ [InlineData("test_content.txt", "text/plain")]
+ public async Task AzureOpenAIFileServiceLifecycleAsync(string fileName, string mimeType)
+ {
+ // Arrange
+ OpenAIFileService fileService = this.CreateAzureOpenAIFileService();
+
+ // Act & Assert
+ await this.VerifyFileServiceLifecycleAsync(fileService, fileName, mimeType);
+ }
+
+ private async Task VerifyFileServiceLifecycleAsync(OpenAIFileService fileService, string fileName, string mimeType)
+ {
+ // Setup file content
+ await using FileStream fileStream = File.OpenRead($"./TestData/{fileName}");
+ BinaryData sourceData = await BinaryData.FromStreamAsync(fileStream);
+ BinaryContent sourceContent = new(sourceData.ToArray(), mimeType);
+
+ // Upload file with unsupported purpose (failure case)
+ await Assert.ThrowsAsync<HttpOperationException>(() => fileService.UploadContentAsync(sourceContent, new(fileName, OpenAIFilePurpose.AssistantsOutput)));
+
+ // Upload file with wacky purpose (failure case)
+ await Assert.ThrowsAsync<HttpOperationException>(() => fileService.UploadContentAsync(sourceContent, new(fileName, new OpenAIFilePurpose("pretend"))));
+
+ // Upload file
+ OpenAIFileReference fileReference = await fileService.UploadContentAsync(sourceContent, new(fileName, OpenAIFilePurpose.FineTune));
+ try
+ {
+ AssertFileReferenceEquals(fileReference, fileName, sourceData.Length, OpenAIFilePurpose.FineTune);
+
+ // Retrieve files by different purpose
+ Dictionary<string, OpenAIFileReference> fileMap = await GetFilesAsync(fileService, OpenAIFilePurpose.Assistants);
+ Assert.DoesNotContain(fileReference.Id, fileMap.Keys);
+
+ // Retrieve files by wacky purpose (failure case)
+ await Assert.ThrowsAsync<HttpOperationException>(() => GetFilesAsync(fileService, new OpenAIFilePurpose("pretend")));
+
+ // Retrieve files by expected purpose
+ fileMap = await GetFilesAsync(fileService, OpenAIFilePurpose.FineTune);
+ Assert.Contains(fileReference.Id, fileMap.Keys);
+ AssertFileReferenceEquals(fileMap[fileReference.Id], fileName, sourceData.Length, OpenAIFilePurpose.FineTune);
+
+ // Retrieve files by no specific purpose
+ fileMap = await GetFilesAsync(fileService);
+ Assert.Contains(fileReference.Id, fileMap.Keys);
+ AssertFileReferenceEquals(fileMap[fileReference.Id], fileName, sourceData.Length, OpenAIFilePurpose.FineTune);
+
+ // Retrieve file by id
+ OpenAIFileReference file = await fileService.GetFileAsync(fileReference.Id);
+ AssertFileReferenceEquals(file, fileName, sourceData.Length, OpenAIFilePurpose.FineTune);
+
+ // Retrieve file content
+ BinaryContent retrievedContent = await fileService.GetFileContentAsync(fileReference.Id);
+ Assert.NotNull(retrievedContent.Data);
+ Assert.NotNull(retrievedContent.Uri);
+ Assert.NotNull(retrievedContent.Metadata);
+ Assert.Equal(fileReference.Id, retrievedContent.Metadata["id"]);
+ Assert.Equal(sourceContent.Data!.Value.Length, retrievedContent.Data.Value.Length);
+ }
+ finally
+ {
+ // Delete file
+ await fileService.DeleteFileAsync(fileReference.Id);
+ }
+ }
+
+ private static void AssertFileReferenceEquals(OpenAIFileReference fileReference, string expectedFileName, int expectedSize, OpenAIFilePurpose expectedPurpose)
+ {
+ Assert.Equal(expectedFileName, fileReference.FileName);
+ Assert.Equal(expectedPurpose, fileReference.Purpose);
+ Assert.Equal(expectedSize, fileReference.SizeInBytes);
+ }
+
+ private static async Task<Dictionary<string, OpenAIFileReference>> GetFilesAsync(OpenAIFileService fileService, OpenAIFilePurpose? purpose = null)
+ {
+ IEnumerable<OpenAIFileReference> files = await fileService.GetFilesAsync(purpose);
+ Dictionary<string, OpenAIFileReference> fileIds = files.DistinctBy(f => f.Id).ToDictionary(f => f.Id);
+ return fileIds;
+ }
+
+ #region internals
+
+ private readonly XunitLogger _logger = new(output);
+ private readonly RedirectOutput _testOutputHelper = new(output);
+
+ public void Dispose()
+ {
+ this._logger.Dispose();
+ this._testOutputHelper.Dispose();
+ }
+
+ private OpenAIFileService CreateOpenAIFileService()
+ {
+ var openAIConfiguration = this._configuration.GetSection("OpenAI").Get<OpenAIConfiguration>();
+
+ Assert.NotNull(openAIConfiguration);
+ Assert.NotNull(openAIConfiguration.ApiKey);
+ Assert.NotNull(openAIConfiguration.ServiceId);
+
+ return new(openAIConfiguration.ApiKey, openAIConfiguration.ServiceId, loggerFactory: this._logger);
+ }
+
+ private OpenAIFileService CreateAzureOpenAIFileService()
+ {
+ var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get<AzureOpenAIConfiguration>();
+
+ Assert.NotNull(azureOpenAIConfiguration);
+ Assert.NotNull(azureOpenAIConfiguration.Endpoint);
+ Assert.NotNull(azureOpenAIConfiguration.ApiKey);
+ Assert.NotNull(azureOpenAIConfiguration.ServiceId);
+
+ return new(new Uri(azureOpenAIConfiguration.Endpoint), azureOpenAIConfiguration.ApiKey, azureOpenAIConfiguration.ServiceId, loggerFactory: this._logger);
+ }
+
+ #endregion
+}
diff --git a/dotnet/src/IntegrationTests/IntegrationTests.csproj b/dotnet/src/IntegrationTests/IntegrationTests.csproj
index 63e4ec8d28fe..df5afa473ce7 100644
--- a/dotnet/src/IntegrationTests/IntegrationTests.csproj
+++ b/dotnet/src/IntegrationTests/IntegrationTests.csproj
@@ -92,9 +92,6 @@
Always
-
- Always
- PreserveNewest
@@ -122,10 +119,10 @@
PreserveNewest
-
+ Always
-
+ Always
diff --git a/dotnet/src/IntegrationTests/Plugins/PluginTests.cs b/dotnet/src/IntegrationTests/Plugins/OpenApi/OpenApiPluginsTests.cs
similarity index 52%
rename from dotnet/src/IntegrationTests/Plugins/PluginTests.cs
rename to dotnet/src/IntegrationTests/Plugins/OpenApi/OpenApiPluginsTests.cs
index 8275a99e7423..46aad7f7b0d0 100644
--- a/dotnet/src/IntegrationTests/Plugins/PluginTests.cs
+++ b/dotnet/src/IntegrationTests/Plugins/OpenApi/OpenApiPluginsTests.cs
@@ -7,74 +7,10 @@
using Microsoft.SemanticKernel.Plugins.OpenApi;
using Xunit;
-namespace SemanticKernel.IntegrationTests.Plugins;
+namespace SemanticKernel.IntegrationTests.Plugins.OpenApi;
public class PluginTests
{
- [Theory]
- [InlineData("https://www.klarna.com/.well-known/ai-plugin.json", "Klarna", "productsUsingGET", "Laptop", 3, 200, "US")]
- public async Task QueryKlarnaOpenAIPluginAsync(
- string pluginEndpoint,
- string name,
- string functionName,
- string query,
- int size,
- int budget,
- string countryCode)
- {
- // Arrange
- var kernel = new Kernel();
- using HttpClient httpClient = new();
-
- var plugin = await kernel.ImportPluginFromOpenAIAsync(
- name,
- new Uri(pluginEndpoint),
- new OpenAIFunctionExecutionParameters(httpClient));
-
- var arguments = new KernelArguments
- {
- ["q"] = query,
- ["size"] = size,
- ["max_price"] = budget,
- ["countryCode"] = countryCode
- };
-
- // Act
- await plugin[functionName].InvokeAsync(kernel, arguments);
- }
-
- [Theory]
- [InlineData("https://www.klarna.com/us/shopping/public/openai/v0/api-docs/", "Klarna", "productsUsingGET", "Laptop", 3, 200, "US")]
- public async Task QueryKlarnaOpenApiPluginAsync(
- string pluginEndpoint,
- string name,
- string functionName,
- string query,
- int size,
- int budget,
- string countryCode)
- {
- // Arrange
- var kernel = new Kernel();
- using HttpClient httpClient = new();
-
- var plugin = await kernel.ImportPluginFromOpenApiAsync(
- name,
- new Uri(pluginEndpoint),
- new OpenApiFunctionExecutionParameters(httpClient));
-
- var arguments = new KernelArguments
- {
- ["q"] = query,
- ["size"] = size.ToString(System.Globalization.CultureInfo.InvariantCulture),
- ["max_price"] = budget,
- ["countryCode"] = countryCode
- };
-
- // Act
- await plugin[functionName].InvokeAsync(kernel, arguments);
- }
-
[Theory]
[InlineData("https://www.klarna.com/us/shopping/public/openai/v0/api-docs/", "Klarna", "productsUsingGET", "Laptop", 3, 200, "US")]
public async Task QueryKlarnaOpenApiPluginRunAsync(
@@ -99,7 +35,7 @@ public async Task QueryKlarnaOpenApiPluginRunAsync(
{
["q"] = query,
["size"] = size,
- ["budget"] = budget.ToString(System.Globalization.CultureInfo.InvariantCulture),
+ ["max_price"] = budget.ToString(System.Globalization.CultureInfo.InvariantCulture),
["countryCode"] = countryCode
};
@@ -114,38 +50,7 @@ public async Task QueryKlarnaOpenApiPluginRunAsync(
}
[Theory]
- [InlineData("https://raw.githubusercontent.com/sisbell/chatgpt-plugin-store/main/manifests/instacart.com.json",
- "Instacart",
- "create",
- """{"title":"Shopping List", "ingredients": ["Flour"], "question": "what ingredients do I need to make chocolate cookies?", "partner_name": "OpenAI" }"""
- )]
- public async Task QueryInstacartPluginAsync(
- string pluginEndpoint,
- string name,
- string functionName,
- string payload)
- {
- // Arrange
- var kernel = new Kernel();
- using HttpClient httpClient = new();
-
- //note that this plugin is not compliant according to the underlying validator in SK
- var plugin = await kernel.ImportPluginFromOpenAIAsync(
- name,
- new Uri(pluginEndpoint),
- new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
-
- var arguments = new KernelArguments
- {
- ["payload"] = payload
- };
-
- // Act
- await plugin[functionName].InvokeAsync(kernel, arguments);
- }
-
- [Theory]
- [InlineData("Plugins/instacart-ai-plugin.json",
+ [InlineData("Plugins/OpenApi/instacart-service.yaml",
"Instacart",
"create",
"""{"title":"Shopping List", "ingredients": ["Flour"], "question": "what ingredients do I need to make chocolate cookies?", "partner_name": "OpenAI" }"""
@@ -162,10 +67,10 @@ public async Task QueryInstacartPluginFromStreamAsync(
var kernel = new Kernel();
// note that this plugin is not compliant according to the underlying validator in SK
- var plugin = await kernel.ImportPluginFromOpenAIAsync(
+ var plugin = await kernel.ImportPluginFromOpenApiAsync(
name,
stream,
- new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
+ new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
var arguments = new KernelArguments
{
@@ -177,7 +82,7 @@ public async Task QueryInstacartPluginFromStreamAsync(
}
[Theory]
- [InlineData("Plugins/instacart-ai-plugin.json",
+ [InlineData("Plugins/OpenApi/instacart-service.yaml",
"Instacart",
"create",
"""{"title":"Shopping List", "ingredients": ["Flour"], "question": "what ingredients do I need to make chocolate cookies?", "partner_name": "OpenAI" }"""
@@ -193,10 +98,10 @@ public async Task QueryInstacartPluginUsingRelativeFilePathAsync(
using HttpClient httpClient = new();
// note that this plugin is not compliant according to the underlying validator in SK
- var plugin = await kernel.ImportPluginFromOpenAIAsync(
+ var plugin = await kernel.ImportPluginFromOpenApiAsync(
name,
pluginFilePath,
- new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
+ new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
var arguments = new KernelArguments
{
@@ -208,7 +113,7 @@ public async Task QueryInstacartPluginUsingRelativeFilePathAsync(
}
[Theory]
- [InlineData("Plugins/instacart-ai-plugin.json", "Instacart", "create")]
+ [InlineData("Plugins/OpenApi/instacart-service.yaml", "Instacart", "create")]
public async Task QueryInstacartPluginWithDynamicPayloadAsync(
string pluginFilePath,
string name,
@@ -220,10 +125,10 @@ public async Task QueryInstacartPluginWithDynamicPayloadAsync(
var kernel = new Kernel();
// note that this plugin is not compliant according to the underlying validator in SK
- var plugin = await kernel.ImportPluginFromOpenAIAsync(
+ var plugin = await kernel.ImportPluginFromOpenApiAsync(
name,
stream,
- new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = true });
+ new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = true });
var arguments = new KernelArguments
{
diff --git a/dotnet/src/IntegrationTests/Plugins/RepairServiceTests.cs b/dotnet/src/IntegrationTests/Plugins/OpenApi/RepairServiceTests.cs
similarity index 94%
rename from dotnet/src/IntegrationTests/Plugins/RepairServiceTests.cs
rename to dotnet/src/IntegrationTests/Plugins/OpenApi/RepairServiceTests.cs
index 9d8610806d8c..f6bcb3c01be8 100644
--- a/dotnet/src/IntegrationTests/Plugins/RepairServiceTests.cs
+++ b/dotnet/src/IntegrationTests/Plugins/OpenApi/RepairServiceTests.cs
@@ -8,7 +8,7 @@
using Microsoft.SemanticKernel.Plugins.OpenApi;
using Xunit;
-namespace SemanticKernel.IntegrationTests.Plugins;
+namespace SemanticKernel.IntegrationTests.Plugins.OpenApi;
public class RepairServiceTests
{
@@ -23,7 +23,7 @@ public async Task ValidateInvokingRepairServicePluginAsync()
var plugin = await kernel.ImportPluginFromOpenApiAsync(
"RepairService",
stream,
- new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
+ new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
var arguments = new KernelArguments
{
@@ -79,7 +79,7 @@ public async Task HttpOperationExceptionIncludeRequestInfoAsync()
var plugin = await kernel.ImportPluginFromOpenApiAsync(
"RepairService",
stream,
- new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
+ new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
var arguments = new KernelArguments
{
@@ -121,7 +121,7 @@ public async Task UseDelegatingHandlerAsync()
var plugin = await kernel.ImportPluginFromOpenApiAsync(
"RepairService",
stream,
- new OpenAIFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
+ new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
// List All Repairs
var result = await plugin["listRepairs"].InvokeAsync(kernel);
diff --git a/dotnet/src/IntegrationTests/Plugins/OpenApi/instacart-service.yaml b/dotnet/src/IntegrationTests/Plugins/OpenApi/instacart-service.yaml
new file mode 100644
index 000000000000..9bb3fb3cd26f
--- /dev/null
+++ b/dotnet/src/IntegrationTests/Plugins/OpenApi/instacart-service.yaml
@@ -0,0 +1,53 @@
+openapi: 3.0.1
+info:
+ title: Instacart
+ description: Order from your favorite local grocery stores.
+ version: 'v2.1'
+servers:
+ - url: https://www.instacart.com
+paths:
+ /rest/llm_integration/openapi/v2_1/recipes:
+ post:
+ operationId: create
+ summary: Create an Instacart link to the shopping list of ingredients.
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/createRequest'
+ responses:
+ "200":
+ description: Instacart link to the shopping list of ingredients.
+ "400":
+ description: Could not create an Instacart link to the shopping list of ingredients.
+components:
+ schemas:
+ createRequest:
+ type: object
+ required:
+ - title
+ - ingredients
+ - instructions
+ - question
+ - partner_name
+ properties:
+ title:
+ type: string
+ description: Recipe title (e.g. "Vanilla Yogurt Parfait")
+ ingredients:
+ type: array
+ items:
+ type: string
+ description: List of strings where each element is a recipe ingredient (e.g. ["2 cups of greek yogurt", "2 tablespoons of honey", "1 teaspoon of vanilla extract"]). Don't include items in the list that the user already mentioned they have.
+ instructions:
+ type: array
+ items:
+ type: string
+ description: List of strings where each element is a recipe instruction
+ question:
+ type: string
+ description: This field stores the question asked by the user about recipe or mealplan in the current chat session. For instance, a user can ask "recipe for chocolate cookies" and the assistant responds by listing the ingredients needed to make chocolate cookies. In this chat interaction, we need to return "recipe for chocolate cookies" as the value in this field
+ partner_name:
+ type: string
+ description: The value used to populate this field should always be "OpenAI"
diff --git a/dotnet/src/IntegrationTests/Plugins/repair-service.json b/dotnet/src/IntegrationTests/Plugins/OpenApi/repair-service.json
similarity index 100%
rename from dotnet/src/IntegrationTests/Plugins/repair-service.json
rename to dotnet/src/IntegrationTests/Plugins/OpenApi/repair-service.json
diff --git a/dotnet/src/IntegrationTests/Plugins/instacart-ai-plugin.json b/dotnet/src/IntegrationTests/Plugins/instacart-ai-plugin.json
deleted file mode 100644
index 3dc0bcb3925e..000000000000
--- a/dotnet/src/IntegrationTests/Plugins/instacart-ai-plugin.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "schema_version": "v1",
- "name_for_model": "Instacart",
- "name_for_human": "Instacart",
- "description_for_model": "The Instacart plugin can be used to create a shopping list when a recipe or meal plan is mentioned. Exclude items the user already has from the shopping list. If multiple options for ingredients are provided, separate them into individual ingredients in the shopping list. Always save the user's question in the question field. Always populate the 'partner_name' field with value 'OpenAI'. Here are some examples of food or grocery related queries where the Instacart plugin can be used: chocolate cake recipe, dinner plan for a vegetarian family of 4, instructions to make orange chicken, healthy snacks for kids to bring to school, vegetarian dinner party dish ideas, lunch ideas for a picnic in the park, how to make lasagna, dinner ideas, meal plan for a family of 4, breakfast suggestions, snack ideas for a movie night with friends, squash and pasta dish with additional ingredient suggestions. Parse all ingredients referenced, including optional ingredients, as separate ingredients so that they can be correctly interpreted as unique items by the plugin. Do include optional ingredients and separate the items in the shopping list. Always keep the ingredient quantity. Ask the users if they want to purchase the ingredients and related items on Instacart, and if users agree, generate a shopping list using the Instacart plugin and response with: Here's your Instacart shopping list for [Recipe Name]. \n\nAdditional guidelines and examples: \n\n**Guideline**\n- For ingredient containing bundled items such as 'butter or margarine', 'chicken broth or wine', 'mixed nuts (e.g. ...)', 'vegetables (...)', and 'Optional toppings (...)' you need to separate the ingredient into individual items in the shopping list, e.g. 
'butter or margarine' becomes ['butter', 'margarine'] in the shopping list, 'Raw vegetables (e.g., carrot sticks, cucumber slices, bell pepper strips)' becomes ['carrot sticks', 'cucumber slices', 'bell pepper strips']\n- If users say they have something, mark it as \"you already have\" in the list and don't add it to the shopping list\n\nExample 1: \nuser: garlic butter shrimp recipe \nassistant: Here's a delicious garlic butter shrimp recipe: Ingredients: 1 pound large shrimp ... 1/4 cup chicken broth or white wine (optional) Salt and pepper to taste ... \n**Note that the shopping list should contain ['1/4 cup chicken broth', '1/4 cup white wine', 'Salt', 'pepper', ...] instead of ['1/4 cup chicken broth or white wine (optional)', 'Salt and pepper to taste', ...]\n\nExample 2: \nuser: I have squash and pasta. what can I make and what other ingredients do I need? \nassistant: You can make a delicious squash and pasta dish with just a few additional ingredients. Here's a simple recipe: Ingredients: Squash (you already have) Pasta (you already have) Olive oil onion garlic Salt and pepper, ... \n**Note that the shopping list should contain ['Olive oil', 'onion', 'garlic', 'salt', 'pepper', ...] but without 'Squash' or 'Pasta' in it since user has them already.",
- "description_for_human": "Whatâs cookin'? Ask about recipes, meal plans, & more -- and get ingredients delivered from 40,000+ stores!",
- "auth": {
- "type": "none"
- },
- "api": {
- "type": "openapi",
- "url": "https://www.instacart.com/rest/llm_integration/config/openapi.yaml"
- },
- "logo_url": "https://www.instacart.com/assets/beetstrap/brand/2022/carrotlogo-1286c257354036d178c09e815906198eb7f012b8cdc4f6f8ec86d3e64d799a5b.png",
- "contact_email": "help@instacart.com",
- "legal_info_url": "https://www.instacart.com/terms"
-}
\ No newline at end of file
diff --git a/dotnet/src/IntegrationTests/PromptTests.cs b/dotnet/src/IntegrationTests/PromptTests.cs
index 9c23661c6c96..7b252713d24c 100644
--- a/dotnet/src/IntegrationTests/PromptTests.cs
+++ b/dotnet/src/IntegrationTests/PromptTests.cs
@@ -57,7 +57,7 @@ public async Task GenerateStoryTestAsync(string resourceName, bool isHandlebars)
});
// Assert
- Assert.Contains("Dog", actual.GetValue(), StringComparison.OrdinalIgnoreCase);
+ Assert.True(actual.GetValue()?.Length > 0);
}
#region private methods
diff --git a/dotnet/src/IntegrationTests/TestData/test_content.txt b/dotnet/src/IntegrationTests/TestData/test_content.txt
new file mode 100644
index 000000000000..447ce0649e56
--- /dev/null
+++ b/dotnet/src/IntegrationTests/TestData/test_content.txt
@@ -0,0 +1,9 @@
+Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Amet dictum sit amet justo donec enim diam vulputate ut. Nibh ipsum consequat nisl vel pretium lectus. Urna nec tincidunt praesent semper feugiat. Tristique nulla aliquet enim tortor. Ut morbi tincidunt augue interdum velit euismod in pellentesque massa. Ullamcorper morbi tincidunt ornare massa eget egestas purus viverra. Commodo ullamcorper a lacus vestibulum sed arcu non. Volutpat ac tincidunt vitae semper quis lectus nulla. Sem nulla pharetra diam sit amet nisl. Viverra aliquet eget sit amet tellus cras adipiscing enim eu.
+
+Morbi blandit cursus risus at ultrices mi tempus. Sagittis orci a scelerisque purus. Iaculis nunc sed augue lacus viverra. Accumsan sit amet nulla facilisi morbi tempus iaculis. Nisl rhoncus mattis rhoncus urna neque. Commodo odio aenean sed adipiscing diam donec adipiscing tristique. Tristique senectus et netus et malesuada fames. Nascetur ridiculus mus mauris vitae ultricies leo integer. Ut sem viverra aliquet eget. Sed egestas egestas fringilla phasellus faucibus scelerisque.
+
+In tellus integer feugiat scelerisque varius morbi. Vitae proin sagittis nisl rhoncus mattis rhoncus urna neque. Cum sociis natoque penatibus et magnis dis. Iaculis at erat pellentesque adipiscing commodo elit at imperdiet dui. Praesent semper feugiat nibh sed pulvinar proin gravida hendrerit lectus. Consectetur a erat nam at lectus urna. Hac habitasse platea dictumst vestibulum rhoncus est pellentesque elit. Aliquam vestibulum morbi blandit cursus risus at ultrices. Eu non diam phasellus vestibulum lorem sed. Risus pretium quam vulputate dignissim suspendisse in est. Elit scelerisque mauris pellentesque pulvinar pellentesque habitant morbi. At varius vel pharetra vel turpis nunc eget. Aliquam malesuada bibendum arcu vitae. At consectetur lorem donec massa. Mi sit amet mauris commodo. Maecenas volutpat blandit aliquam etiam erat velit. Nullam ac tortor vitae purus faucibus ornare suspendisse.
+
+Facilisi nullam vehicula ipsum a arcu cursus vitae. Commodo sed egestas egestas fringilla phasellus. Lacus luctus accumsan tortor posuere ac ut consequat. Adipiscing commodo elit at imperdiet dui accumsan sit. Non tellus orci ac auctor augue. Viverra aliquet eget sit amet tellus. Luctus venenatis lectus magna fringilla urna porttitor rhoncus dolor. Mattis enim ut tellus elementum. Nunc sed id semper risus. At augue eget arcu dictum.
+
+Ullamcorper a lacus vestibulum sed arcu non. Vitae tortor condimentum lacinia quis vel. Dui faucibus in ornare quam viverra. Vel pharetra vel turpis nunc eget. In egestas erat imperdiet sed euismod nisi porta lorem mollis. Lacus vestibulum sed arcu non odio euismod lacinia at quis. Augue mauris augue neque gravida in. Ornare quam viverra orci sagittis. Lacus suspendisse faucibus interdum posuere lorem ipsum. Arcu vitae elementum curabitur vitae nunc sed velit dignissim. Diam quam nulla porttitor massa id neque. Gravida dictum fusce ut placerat orci nulla pellentesque. Mus mauris vitae ultricies leo integer malesuada nunc vel risus. Donec pretium vulputate sapien nec sagittis aliquam. Velit egestas dui id ornare. Sed elementum tempus egestas sed sed risus pretium quam vulputate.
\ No newline at end of file
diff --git a/dotnet/src/IntegrationTests/testsettings.json b/dotnet/src/IntegrationTests/testsettings.json
index 39ec5c4d3b1c..66df73f8b7a5 100644
--- a/dotnet/src/IntegrationTests/testsettings.json
+++ b/dotnet/src/IntegrationTests/testsettings.json
@@ -51,8 +51,8 @@
"EmbeddingModelId": "embedding-001",
"ApiKey": "",
"Gemini": {
- "ModelId": "gemini-1.0-pro",
- "VisionModelId": "gemini-1.0-pro-vision"
+ "ModelId": "gemini-1.5-flash",
+ "VisionModelId": "gemini-1.5-flash"
}
},
"VertexAI": {
@@ -61,8 +61,8 @@
"Location": "us-central1",
"ProjectId": "",
"Gemini": {
- "ModelId": "gemini-1.0-pro",
- "VisionModelId": "gemini-1.0-pro-vision"
+ "ModelId": "gemini-1.5-flash",
+ "VisionModelId": "gemini-1.5-flash"
}
},
"Bing": {
diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/XunitLogger.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/XunitLogger.cs
index ca2c22cd800a..578cc2aec366 100644
--- a/dotnet/src/InternalUtilities/samples/InternalUtilities/XunitLogger.cs
+++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/XunitLogger.cs
@@ -7,16 +7,25 @@
///
internal sealed class XunitLogger(ITestOutputHelper output) : ILoggerFactory, ILogger, IDisposable
{
+ private object? _scopeState;
+
///
public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func formatter)
- => output.WriteLine(state?.ToString());
+ {
+ var localState = state?.ToString();
+ var line = this._scopeState is not null ? $"{this._scopeState} {localState}" : localState;
+ output.WriteLine(line);
+ }
///
public bool IsEnabled(LogLevel logLevel) => true;
///
public IDisposable BeginScope(TState state) where TState : notnull
- => this;
+ {
+ this._scopeState = state;
+ return this;
+ }
///
public void Dispose()
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs
index 7c572509056c..05f473b1b792 100644
--- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs
@@ -32,7 +32,7 @@ namespace Microsoft.SemanticKernel.ChatCompletion;
public static AuthorRole Tool { get; } = new("tool");
///
- /// Gets the label associated with this AuthorRole.
+ /// Gets the label associated with this .
///
///
/// The label is what will be serialized into the "role" message field of the Chat Message format.
@@ -40,9 +40,9 @@ namespace Microsoft.SemanticKernel.ChatCompletion;
public string Label { get; }
///
- /// Creates a new AuthorRole instance with the provided label.
+ /// Creates a new instance with the provided label.
///
- /// The label to associate with this AuthorRole.
+ /// The label to associate with this .
[JsonConstructor]
public AuthorRole(string label)
{
@@ -51,21 +51,21 @@ public AuthorRole(string label)
}
///
- /// Returns a value indicating whether two AuthorRole instances are equivalent, as determined by a
+ /// Returns a value indicating whether two instances are equivalent, as determined by a
/// case-insensitive comparison of their labels.
///
- /// the first AuthorRole instance to compare
- /// the second AuthorRole instance to compare
+ /// the first instance to compare
+ /// the second instance to compare
/// true if left and right are both null or have equivalent labels; false otherwise
public static bool operator ==(AuthorRole left, AuthorRole right)
=> left.Equals(right);
///
- /// Returns a value indicating whether two AuthorRole instances are not equivalent, as determined by a
+ /// Returns a value indicating whether two instances are not equivalent, as determined by a
/// case-insensitive comparison of their labels.
///
- /// the first AuthorRole instance to compare
- /// the second AuthorRole instance to compare
+ /// the first instance to compare
+ /// the second instance to compare
/// false if left and right are both null or have equivalent labels; true otherwise
public static bool operator !=(AuthorRole left, AuthorRole right)
=> !(left == right);
@@ -80,8 +80,8 @@ public bool Equals(AuthorRole other)
///
public override int GetHashCode()
- => StringComparer.OrdinalIgnoreCase.GetHashCode(this.Label ?? string.Empty);
+ => StringComparer.OrdinalIgnoreCase.GetHashCode(this.Label);
///
- public override string ToString() => this.Label ?? string.Empty;
+ public override string ToString() => this.Label;
}
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs
index 24ff3cf19438..0042cf5a2948 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs
@@ -20,7 +20,11 @@ public class ChatMessageContent : KernelContent
///
[Experimental("SKEXP0001")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public string? AuthorName { get; set; }
+ public string? AuthorName
+ {
+ get => this._authorName;
+ set => this._authorName = string.IsNullOrWhiteSpace(value) ? null : value;
+ }
///
/// Role of the author of the message
@@ -171,4 +175,5 @@ public override string ToString()
private ChatMessageContentItemCollection? _items;
private Encoding _encoding;
+ private string? _authorName;
}
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs
index 5cc7afb582ed..cc2b0c354284 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs
@@ -64,7 +64,11 @@ public StreamingKernelContentItemCollection Items
///
[Experimental("SKEXP0001")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public string? AuthorName { get; set; }
+ public string? AuthorName
+ {
+ get => this._authorName;
+ set => this._authorName = string.IsNullOrWhiteSpace(value) ? null : value;
+ }
///
/// Role of the author of the message
@@ -126,4 +130,5 @@ public StreamingChatMessageContent(AuthorRole? role, string? content, object? in
private StreamingKernelContentItemCollection? _items;
private Encoding _encoding;
+ private string? _authorName;
}
diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs
index f430324df867..39f2439b9df0 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Filters/AutoFunctionInvocation/AutoFunctionInvocationContext.cs
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Diagnostics.CodeAnalysis;
+using System.Threading;
using Microsoft.SemanticKernel.ChatCompletion;
namespace Microsoft.SemanticKernel;
@@ -35,6 +36,12 @@ public AutoFunctionInvocationContext(
this.ChatHistory = chatHistory;
}
+ ///
+ /// The to monitor for cancellation requests.
+ /// The default is .
+ ///
+ public CancellationToken CancellationToken { get; init; }
+
///
/// Gets the arguments associated with the operation.
///
diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvocationContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvocationContext.cs
index c208f1a75f85..1ef77aac8e60 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvocationContext.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Function/FunctionInvocationContext.cs
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Diagnostics.CodeAnalysis;
+using System.Threading;
namespace Microsoft.SemanticKernel;
@@ -29,6 +30,12 @@ internal FunctionInvocationContext(Kernel kernel, KernelFunction function, Kerne
this.Result = result;
}
+ ///
+ /// The to monitor for cancellation requests.
+ /// The default is .
+ ///
+ public CancellationToken CancellationToken { get; init; }
+
///
/// Gets the containing services, plugins, and other state for use throughout the operation.
///
diff --git a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs
index a1e449642071..918586bfa6f1 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Filters/Prompt/PromptRenderContext.cs
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Diagnostics.CodeAnalysis;
+using System.Threading;
namespace Microsoft.SemanticKernel;
@@ -29,6 +30,12 @@ internal PromptRenderContext(Kernel kernel, KernelFunction function, KernelArgum
this.Arguments = arguments;
}
+ ///
+ /// The to monitor for cancellation requests.
+ /// The default is .
+ ///
+ public CancellationToken CancellationToken { get; init; }
+
///
/// Gets the containing services, plugins, and other state for use throughout the operation.
///
diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs
index 31101bdb1958..9e50f653f5f8 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelFunction.cs
@@ -186,7 +186,7 @@ public async Task InvokeAsync(
{
// Invoking the function and updating context with result.
context.Result = functionResult = await this.InvokeCoreAsync(kernel, context.Arguments, cancellationToken).ConfigureAwait(false);
- }).ConfigureAwait(false);
+ }, cancellationToken).ConfigureAwait(false);
// Apply any changes from the function filters context to final result.
functionResult = invocationContext.Result;
@@ -321,7 +321,7 @@ public async IAsyncEnumerable InvokeStreamingAsync(
context.Result = new FunctionResult(this, enumerable, kernel.Culture);
return Task.CompletedTask;
- }).ConfigureAwait(false);
+ }, cancellationToken).ConfigureAwait(false);
// Apply changes from the function filters to final result.
var enumerable = invocationContext.Result.GetValue>() ?? AsyncEnumerable.Empty();
diff --git a/dotnet/src/SemanticKernel.Abstractions/Kernel.cs b/dotnet/src/SemanticKernel.Abstractions/Kernel.cs
index c466fb9f6485..283d3de05dd5 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Kernel.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Kernel.cs
@@ -263,7 +263,7 @@ public IEnumerable GetAllServices() where T : class
// M.E.DI doesn't support querying for a service without a key, and it also doesn't
// support AnyKey currently: https://github.com/dotnet/runtime/issues/91466
// As a workaround, KernelBuilder injects a service containing the type-to-all-keys
- // mapping. We can query for that service and and then use it to try to get a service.
+ // mapping. We can query for that service and then use it to try to get a service.
if (this.Services.GetKeyedService>>(KernelServiceTypeToKeyMappings) is { } typeToKeyMappings)
{
if (typeToKeyMappings.TryGetValue(typeof(T), out HashSet