-
Notifications
You must be signed in to change notification settings - Fork 3.1k
/
OpenAI_ChatCompletionWithVision.cs
59 lines (43 loc) · 1.92 KB
/
OpenAI_ChatCompletionWithVision.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
// Copyright (c) Microsoft. All rights reserved.
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Resources;
namespace ChatCompletion;
// This example shows how to use GPT Vision model with different content types (text and image).
/// <summary>
/// Demonstrates using a GPT Vision model with mixed content types (text and image)
/// in a single user message: one example references a remote image by URI, the other
/// uploads local image bytes directly.
/// </summary>
public class OpenAI_ChatCompletionWithVision(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// Asks the vision model a question about an image referenced by a public URI.
    /// The image is fetched by the service; only the URI is sent in the request.
    /// </summary>
    [Fact]
    public async Task RemoteImageAsync()
    {
        const string ImageUri = "https://upload.wikimedia.org/wikipedia/commons/d/d5/Half-timbered_mansion%2C_Zirkel%2C_East_view.jpg";

        var kernel = Kernel.CreateBuilder()
            .AddOpenAIChatCompletion("gpt-4-vision-preview", TestConfiguration.OpenAI.ApiKey)
            .Build();

        var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();

        // A single user message may combine text and image content items.
        var chatHistory = new ChatHistory("You are a friendly assistant.");
        chatHistory.AddUserMessage(
        [
            new TextContent("What’s in this image?"),
            new ImageContent(new Uri(ImageUri))
        ]);

        var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory);

        Console.WriteLine(reply.Content);
    }

    /// <summary>
    /// Asks the vision model a question about an image loaded from an embedded
    /// resource; the raw bytes are sent inline with the declared MIME type.
    /// </summary>
    [Fact]
    public async Task LocalImageAsync()
    {
        var imageBytes = await EmbeddedResource.ReadAllAsync("sample_image.jpg");

        var kernel = Kernel.CreateBuilder()
            .AddOpenAIChatCompletion("gpt-4-vision-preview", TestConfiguration.OpenAI.ApiKey)
            .Build();

        var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();

        var chatHistory = new ChatHistory("You are a friendly assistant.");
        chatHistory.AddUserMessage(
        [
            new TextContent("What’s in this image?"),
            // Fix: the IANA-registered MIME type for JPEG is "image/jpeg";
            // "image/jpg" is non-standard and may be rejected by the service.
            new ImageContent(imageBytes, "image/jpeg")
        ]);

        var reply = await chatCompletionService.GetChatMessageContentAsync(chatHistory);

        Console.WriteLine(reply.Content);
    }
}