-
Notifications
You must be signed in to change notification settings - Fork 3.1k
/
OpenAI_ChatCompletion.cs
198 lines (156 loc) · 8.9 KB
/
OpenAI_ChatCompletion.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
// Copyright (c) Microsoft. All rights reserved.
using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
namespace ChatCompletion;
// The following example shows how to use Semantic Kernel with OpenAI API
public class OpenAI_ChatCompletion(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// Demonstrates a multi-turn chat using the <see cref="OpenAIChatCompletionService"/> directly (no kernel).
    /// </summary>
    [Fact]
    public async Task ServicePromptAsync()
    {
        Assert.NotNull(TestConfiguration.OpenAI.ChatModelId);
        Assert.NotNull(TestConfiguration.OpenAI.ApiKey);

        Console.WriteLine("======== Open AI - Chat Completion ========");

        OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);

        await StartChatAsync(chatCompletionService);
    }

    /// <summary>
    /// Demonstrates retrieving SDK-specific details (log probabilities, usage, etc.) from the
    /// <see cref="ChatMessageContent.InnerContent"/> of a service-level chat completion.
    /// </summary>
    /// <remarks>
    /// This is a breaking glass scenario: the inner content type comes from the underlying OpenAI SDK
    /// and may change across SDK versions.
    /// </remarks>
    [Fact]
    public async Task ServicePromptWithInnerContentAsync()
    {
        Assert.NotNull(TestConfiguration.OpenAI.ChatModelId);
        Assert.NotNull(TestConfiguration.OpenAI.ApiKey);

        Console.WriteLine("======== Open AI - Chat Completion ========");

        OpenAIChatCompletionService chatService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);

        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        OutputLastMessage(chatHistory);

        // First assistant message, requesting log probabilities so they appear in the inner content.
        var reply = await chatService.GetChatMessageContentAsync(chatHistory, new OpenAIPromptExecutionSettings { Logprobs = true, TopLogprobs = 3 });

        // Assistant message details.
        // Assert instead of using the null-forgiving operator: if a future SDK version changes the
        // inner content type, this fails with a clear message rather than an NRE inside OutputInnerContent.
        var replyInnerContent = reply.InnerContent as OpenAI.Chat.ChatCompletion;
        Assert.NotNull(replyInnerContent);

        OutputInnerContent(replyInnerContent);
    }

    /// <summary>
    /// Demonstrates a multi-turn chat built as an XML chat prompt and invoked through the kernel.
    /// </summary>
    [Fact]
    public async Task ChatPromptAsync()
    {
        Assert.NotNull(TestConfiguration.OpenAI.ChatModelId);
        Assert.NotNull(TestConfiguration.OpenAI.ApiKey);

        StringBuilder chatPrompt = new("""
                                       <message role="system">You are a librarian, expert about books</message>
                                       <message role="user">Hi, I'm looking for book suggestions</message>
                                       """);

        var kernel = Kernel.CreateBuilder()
            .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
            .Build();

        var reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        // CDATA prevents any XML-like text in the model reply from corrupting the chat prompt.
        chatPrompt.AppendLine($"<message role=\"assistant\"><![CDATA[{reply}]]></message>");
        chatPrompt.AppendLine("<message role=\"user\">I love history and philosophy, I'd like to learn something new about Greece, any suggestion</message>");

        reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        Console.WriteLine(reply);
    }

    /// <summary>
    /// Demonstrates how you can template a chat history call and get extra information from the response while using the kernel for invocation.
    /// </summary>
    /// <remarks>
    /// This is a breaking glass scenario, any attempt on running with different versions of OpenAI SDK that introduces breaking changes
    /// may cause breaking changes in the code below.
    /// </remarks>
    [Fact]
    public async Task ChatPromptWithInnerContentAsync()
    {
        Assert.NotNull(TestConfiguration.OpenAI.ChatModelId);
        Assert.NotNull(TestConfiguration.OpenAI.ApiKey);

        StringBuilder chatPrompt = new("""
                                       <message role="system">You are a librarian, expert about books</message>
                                       <message role="user">Hi, I'm looking for book suggestions</message>
                                       """);

        var kernel = Kernel.CreateBuilder()
            .AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
            .Build();

        var functionResult = await kernel.InvokePromptAsync(chatPrompt.ToString(),
            new(new OpenAIPromptExecutionSettings { Logprobs = true, TopLogprobs = 3 }));

        // Retrieves underlying chat message content from FunctionResult.
        var messageContent = functionResult.GetValue<ChatMessageContent>();
        Assert.NotNull(messageContent);

        // Retrieves inner content from ChatMessageContent; assert instead of null-forgiving
        // so an SDK type change surfaces as a clear test failure.
        var replyInnerContent = messageContent.InnerContent as OpenAI.Chat.ChatCompletion;
        Assert.NotNull(replyInnerContent);

        OutputInnerContent(replyInnerContent);
    }

    /// <summary>
    /// Runs a two-turn librarian chat against the given service, echoing each message to the console.
    /// </summary>
    /// <param name="chatGPT">Chat completion service to converse with.</param>
    private async Task StartChatAsync(IChatCompletionService chatGPT)
    {
        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        OutputLastMessage(chatHistory);

        // First assistant message
        var reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        OutputLastMessage(chatHistory);

        // Second user message
        chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion");
        OutputLastMessage(chatHistory);

        // Second assistant message
        reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        OutputLastMessage(chatHistory);
    }

    /// <summary>
    /// Retrieve extra information from a <see cref="ChatMessageContent"/> inner content of type <see cref="OpenAI.Chat.ChatCompletion"/>.
    /// </summary>
    /// <param name="innerContent">An instance of <see cref="OpenAI.Chat.ChatCompletion"/> retrieved as an inner content of <see cref="ChatMessageContent"/>.</param>
    /// <remarks>
    /// This is a breaking glass scenario, any attempt on running with different versions of OpenAI SDK that introduces breaking changes
    /// may break the code below.
    /// </remarks>
    private void OutputInnerContent(OpenAI.Chat.ChatCompletion innerContent)
    {
        Console.WriteLine($"Message role: {innerContent.Role}"); // Available as a property of ChatMessageContent

        // Content can be empty (e.g. when the model refuses to answer), so guard before indexing.
        if (innerContent.Content.Count > 0)
        {
            Console.WriteLine($"Message content: {innerContent.Content[0].Text}"); // Available as a property of ChatMessageContent
        }

        Console.WriteLine($"Model: {innerContent.Model}");
        Console.WriteLine($"Created At: {innerContent.CreatedAt}");

        Console.WriteLine($"Finish reason: {innerContent.FinishReason}");
        Console.WriteLine($"Input tokens usage: {innerContent.Usage.InputTokens}");
        Console.WriteLine($"Output tokens usage: {innerContent.Usage.OutputTokens}");
        Console.WriteLine($"Total tokens usage: {innerContent.Usage.TotalTokens}");
        Console.WriteLine($"Refusal: {innerContent.Refusal} ");
        Console.WriteLine($"Id: {innerContent.Id}");
        Console.WriteLine($"System fingerprint: {innerContent.SystemFingerprint}");

        // Log probabilities are only populated when requested via Logprobs/TopLogprobs settings.
        if (innerContent.ContentTokenLogProbabilities.Count > 0)
        {
            Console.WriteLine("Content token log probabilities:");
            foreach (var contentTokenLogProbability in innerContent.ContentTokenLogProbabilities)
            {
                Console.WriteLine($"Token: {contentTokenLogProbability.Token}");
                Console.WriteLine($"Log probability: {contentTokenLogProbability.LogProbability}");

                Console.WriteLine("   Top log probabilities for this token:");
                foreach (var topLogProbability in contentTokenLogProbability.TopLogProbabilities)
                {
                    Console.WriteLine($"   Token: {topLogProbability.Token}");
                    Console.WriteLine($"   Log probability: {topLogProbability.LogProbability}");
                    Console.WriteLine("   =======");
                }

                Console.WriteLine("--------------");
            }
        }

        if (innerContent.RefusalTokenLogProbabilities.Count > 0)
        {
            Console.WriteLine("Refusal token log probabilities:");
            foreach (var refusalTokenLogProbability in innerContent.RefusalTokenLogProbabilities)
            {
                Console.WriteLine($"Token: {refusalTokenLogProbability.Token}");
                Console.WriteLine($"Log probability: {refusalTokenLogProbability.LogProbability}");

                Console.WriteLine("   Refusal top log probabilities for this token:");
                foreach (var topLogProbability in refusalTokenLogProbability.TopLogProbabilities)
                {
                    Console.WriteLine($"   Token: {topLogProbability.Token}");
                    Console.WriteLine($"   Log probability: {topLogProbability.LogProbability}");
                    Console.WriteLine("   =======");
                }
            }
        }
    }
}