Skip to content

Commit

Permalink
.Net: Test execution settings compatibility (#7337)
Browse files Browse the repository at this point in the history
### Motivation, Context and Description
This PR adds a test that verifies the
`OpenAIPromptExecutionSettings.FromExecutionSettings` method can handle
arguments of type `AzureOpenAIPromptExecutionSettings`.

Additionally, it fixes the issue found by @crickman when the
`AzureOpenAIChatCompletionService.GetChatMessageContentsAsync` method is
called with `OpenAIPromptExecutionSettings` instead of
`AzureOpenAIPromptExecutionSettings`.

Closes #7110
  • Loading branch information
SergeyMenshykh committed Jul 18, 2024
1 parent 3b8e54f commit d436430
Show file tree
Hide file tree
Showing 4 changed files with 72 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -248,7 +248,7 @@ public void FromExecutionSettingsWithDataDoesNotIncludeEmptyStopSequences()
}

[Fact]
public void FromExecutionSettingsCreateAzureOpenAIPromptExecutionSettingsFromOpenAIPromptExecutionSettings()
public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromOpenAIPromptExecutionSettings()
{
// Arrange
OpenAIPromptExecutionSettings originalSettings = new()
Expand All @@ -263,7 +263,8 @@ public void FromExecutionSettingsCreateAzureOpenAIPromptExecutionSettingsFromOpe
MaxTokens = 128,
Logprobs = true,
Seed = 123456,
TopLogprobs = 5
TopLogprobs = 5,
ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
};

// Act
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using Azure.AI.OpenAI.Chat;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.OpenAI;

namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Settings;

/// <summary>
/// Unit tests for the <see cref="OpenAIPromptExecutionSettings"/> class, specifically that
/// <see cref="OpenAIPromptExecutionSettings.FromExecutionSettings"/> can accept an
/// <see cref="AzureOpenAIPromptExecutionSettings"/> instance and carry over all shared properties.
/// </summary>
public class OpenAIPromptExecutionSettingsTests
{
    [Fact]
    public void ItCanCreateOpenAIPromptExecutionSettingsFromAzureOpenAIPromptExecutionSettings()
    {
        // Arrange: populate every shared property, plus the Azure-only data source,
        // to verify the conversion does not lose (or choke on) any of them.
        AzureOpenAIPromptExecutionSettings originalSettings = new()
        {
            Temperature = 0.7,
            TopP = 0.7,
            FrequencyPenalty = 0.7,
            PresencePenalty = 0.7,
            StopSequences = new string[] { "foo", "bar" },
            ChatSystemPrompt = "chat system prompt",
            TokenSelectionBiases = new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } },
            MaxTokens = 128,
            Logprobs = true,
            Seed = 123456,
            TopLogprobs = 5,
            // Azure-specific property: has no counterpart on OpenAIPromptExecutionSettings,
            // so the conversion is expected to simply ignore it rather than throw.
            AzureChatDataSource = new AzureSearchChatDataSource
            {
                Endpoint = new Uri("https://test-host"),
                Authentication = DataSourceAuthentication.FromApiKey("api-key"),
                IndexName = "index-name"
            }
        };

        // Act
        OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);

        // Assert
        AssertExecutionSettings(executionSettings);
    }

    /// <summary>
    /// Asserts that all shared properties survived the conversion with the values set in Arrange.
    /// </summary>
    private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings)
    {
        Assert.NotNull(executionSettings);
        Assert.Equal(0.7, executionSettings.Temperature);
        Assert.Equal(0.7, executionSettings.TopP);
        Assert.Equal(0.7, executionSettings.FrequencyPenalty);
        Assert.Equal(0.7, executionSettings.PresencePenalty);
        Assert.Equal(new string[] { "foo", "bar" }, executionSettings.StopSequences);
        Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt);
        Assert.Equal(new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases);
        Assert.Equal(128, executionSettings.MaxTokens);
        Assert.Equal(123456, executionSettings.Seed);
        // Assert.True instead of Assert.Equal(true, ...) per xUnit usage rule xUnit2004.
        Assert.True(executionSettings.Logprobs);
        Assert.Equal(5, executionSettings.TopLogprobs);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,11 @@ public override PromptExecutionSettings Clone()
return settings;
}

if (executionSettings is OpenAIPromptExecutionSettings openAISettings)
{
return openAISettings.Clone<AzureOpenAIPromptExecutionSettings>();
}

// Having the object as the type of the value to serialize is important to ensure all properties of the settings are serialized.
// Otherwise, only the properties ServiceId and ModelId from the public API of the PromptExecutionSettings class will be serialized.
var json = JsonSerializer.Serialize<object>(executionSettings);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ public static OpenAIPromptExecutionSettings FromExecutionSettings(PromptExecutio
/// </summary>
/// <typeparam name="T">The type of the settings object to clone.</typeparam>
/// <returns>A new instance of the settings object.</returns>
protected T Clone<T>() where T : OpenAIPromptExecutionSettings, new()
protected internal T Clone<T>() where T : OpenAIPromptExecutionSettings, new()
{
return new T()
{
Expand Down

0 comments on commit d436430

Please sign in to comment.