From d436430e37f7401bb86bec1f74cee3aed01d7cde Mon Sep 17 00:00:00 2001
From: SergeyMenshykh <68852919+SergeyMenshykh@users.noreply.github.com>
Date: Thu, 18 Jul 2024 10:24:36 +0100
Subject: [PATCH] .Net: Test execution settings compatibility (#7337)
### Motivation, Context and Description
This PR adds a test that verifies the
`OpenAIPromptExecutionSettings.FromExecutionSettings` method can handle
arguments of type `AzureOpenAIPromptExecutionSettings`.
Additionally, it fixes the issue found by @crickman when the
`AzureOpenAIChatCompletionService.GetChatMessageContentsAsync` method is
called with `OpenAIPromptExecutionSettings` instead of
`AzureOpenAIPromptExecutionSettings`.
Closes https://github.com/microsoft/semantic-kernel/issues/7110
---
...AzureOpenAIPromptExecutionSettingsTests.cs | 5 +-
.../OpenAIPromptExecutionSettingsTests.cs | 63 +++++++++++++++++++
.../AzureOpenAIPromptExecutionSettings.cs | 5 ++
.../Settings/OpenAIPromptExecutionSettings.cs | 2 +-
4 files changed, 72 insertions(+), 3 deletions(-)
create mode 100644 dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
index d187d7a49fb8..40d0e36fc1b6 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/AzureOpenAIPromptExecutionSettingsTests.cs
@@ -248,7 +248,7 @@ public void FromExecutionSettingsWithDataDoesNotIncludeEmptyStopSequences()
}
[Fact]
- public void FromExecutionSettingsCreateAzureOpenAIPromptExecutionSettingsFromOpenAIPromptExecutionSettings()
+ public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromOpenAIPromptExecutionSettings()
{
// Arrange
OpenAIPromptExecutionSettings originalSettings = new()
@@ -263,7 +263,8 @@ public void FromExecutionSettingsCreateAzureOpenAIPromptExecutionSettingsFromOpe
MaxTokens = 128,
Logprobs = true,
Seed = 123456,
- TopLogprobs = 5
+ TopLogprobs = 5,
+ ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
};
// Act
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
new file mode 100644
index 000000000000..100b0b1901d8
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs
@@ -0,0 +1,63 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using Azure.AI.OpenAI.Chat;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Settings;
+
+/// <summary>
+/// Unit tests for <see cref="OpenAIPromptExecutionSettings"/> class.
+/// </summary>
+public class OpenAIPromptExecutionSettingsTests
+{
+ [Fact]
+ public void ItCanCreateOpenAIPromptExecutionSettingsFromAzureOpenAIPromptExecutionSettings()
+ {
+ // Arrange
+ AzureOpenAIPromptExecutionSettings originalSettings = new()
+ {
+ Temperature = 0.7,
+ TopP = 0.7,
+ FrequencyPenalty = 0.7,
+ PresencePenalty = 0.7,
+ StopSequences = new string[] { "foo", "bar" },
+ ChatSystemPrompt = "chat system prompt",
+ TokenSelectionBiases = new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } },
+ MaxTokens = 128,
+ Logprobs = true,
+ Seed = 123456,
+ TopLogprobs = 5,
+ AzureChatDataSource = new AzureSearchChatDataSource
+ {
+ Endpoint = new Uri("https://test-host"),
+ Authentication = DataSourceAuthentication.FromApiKey("api-key"),
+ IndexName = "index-name"
+ }
+ };
+
+ // Act
+ OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);
+
+ // Assert
+ AssertExecutionSettings(executionSettings);
+ }
+
+ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings)
+ {
+ Assert.NotNull(executionSettings);
+ Assert.Equal(0.7, executionSettings.Temperature);
+ Assert.Equal(0.7, executionSettings.TopP);
+ Assert.Equal(0.7, executionSettings.FrequencyPenalty);
+ Assert.Equal(0.7, executionSettings.PresencePenalty);
+ Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences);
+ Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt);
+ Assert.Equal(new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases);
+ Assert.Equal(128, executionSettings.MaxTokens);
+ Assert.Equal(123456, executionSettings.Seed);
+ Assert.Equal(true, executionSettings.Logprobs);
+ Assert.Equal(5, executionSettings.TopLogprobs);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs
index 4cfbdf0bb72c..90a20d3435b7 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Settings/AzureOpenAIPromptExecutionSettings.cs
@@ -62,6 +62,11 @@ public override PromptExecutionSettings Clone()
return settings;
}
+ if (executionSettings is OpenAIPromptExecutionSettings openAISettings)
+ {
+ return openAISettings.Clone<AzureOpenAIPromptExecutionSettings>();
+ }
+
// Having the object as the type of the value to serialize is important to ensure all properties of the settings are serialized.
// Otherwise, only the properties ServiceId and ModelId from the public API of the PromptExecutionSettings class will be serialized.
var json = JsonSerializer.Serialize<object>(executionSettings);