Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

.Net: Allow chat history mutation from auto-function invocation filters #7952

Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -900,6 +900,150 @@ public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessage
Assert.Equal("2", assistantMessage2.GetProperty("tool_call_id").GetString());
}

[Fact]
public async Task GetChatMessageContentShouldSendMutatedChatHistoryToLLM()
{
    // Arrange

    // Auto-function-invocation filter that mutates the chat history before control returns
    // to the connector, so the follow-up request to the LLM must reflect the mutation.
    static void MutateChatHistory(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
    {
        // Remove the function call messages from the chat history to reduce token count.
        context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages.

        // NOTE(review): the Task returned by `next` is discarded; presumably the filter
        // pipeline here is synchronous in this test — confirm against the
        // AutoFunctionInvocationFilter helper's expected delegate shape.
        next(context);
    }

    var kernel = new Kernel();
    kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]);
    kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory));

    // First response asks the model-side tool call; second response is the final answer.
    using var firstResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_single_function_call_test_response.json")) };
    this._messageHandlerStub.ResponsesToReturn.Add(firstResponse);

    using var secondResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response.json")) };
    this._messageHandlerStub.ResponsesToReturn.Add(secondResponse);

    var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient);

    // Seed history: indices 1 and 2 (the `Date` call and its result) are what the filter removes.
    var chatHistory = new ChatHistory
    {
        new ChatMessageContent(AuthorRole.User, "What time is it?"),
        new ChatMessageContent(AuthorRole.Assistant, [
            new FunctionCallContent("Date", "TimePlugin", "2")
        ]),
        new ChatMessageContent(AuthorRole.Tool, [
            new FunctionResultContent("Date", "TimePlugin", "2", "rainy")
        ]),
        new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"),
        new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?")
    };

    // Act
    await sut.GetChatMessageContentAsync(chatHistory, new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel);

    // Assert
    // Inspect the SECOND outgoing request (index 1): it is the one sent after the filter ran,
    // so it must contain the mutated history (original 5 messages minus the 2 removed, plus the
    // new tool-call exchange) — 5 messages total.
    var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[1]!);
    Assert.NotNull(actualRequestContent);

    var optionsJson = JsonSerializer.Deserialize<JsonElement>(actualRequestContent);

    var messages = optionsJson.GetProperty("messages");
    Assert.Equal(5, messages.GetArrayLength());

    var userFirstPrompt = messages[0];
    Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString());
    Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString());

    var assistantFirstResponse = messages[1];
    Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString());
    Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString());

    var userSecondPrompt = messages[2];
    Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString());
    Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString());

    var assistantSecondResponse = messages[3];
    Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString());
    Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString());
    Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString());

    var functionResult = messages[4];
    Assert.Equal("tool", functionResult.GetProperty("role").GetString());
    Assert.Equal("rainy", functionResult.GetProperty("content").GetString());
}

[Fact]
public async Task GetStreamingChatMessageContentsShouldSendMutatedChatHistoryToLLM()
{
// Arrange
static void MutateChatHistory(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
{
// Remove the function call messages from the chat history to reduce token count.
context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages.

next(context);
}

var kernel = new Kernel();
kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]);
kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory));

using var firstResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) };
this._messageHandlerStub.ResponsesToReturn.Add(firstResponse);

using var secondResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) };
this._messageHandlerStub.ResponsesToReturn.Add(secondResponse);

var sut = new OpenAIChatCompletionService(modelId: "gpt-3.5-turbo", apiKey: "NOKEY", httpClient: this._httpClient);