.Net: SK AI Model Router Demo (microsoft#7615)
### Motivation and Context

This sample demonstrates how to implement an AI Model Router using
Semantic Kernel connectors to direct requests to various AI models based
on user input.

As part of this example, we integrate LMStudio, Ollama, and OpenAI, using the OpenAI Connector for LMStudio and Ollama since both expose OpenAI-compatible APIs.
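
At its core, the routing amounts to registering each provider under its own service id and then passing the router's pick to the kernel through the prompt execution settings. A minimal sketch of that idea, mirroring the demo's `Program.cs` (the endpoint, model ids, API key, and sample prompt below are illustrative):

```csharp
// Minimal sketch of the routing idea used in this demo (not the full Program.cs).
#pragma warning disable SKEXP0001, SKEXP0010

using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;

ServiceCollection services = new();

// Each provider is registered under its own service id.
services.AddKernel()
    .AddOpenAIChatCompletion(serviceId: "ollama", modelId: "phi3", endpoint: new Uri("http://localhost:11434"), apiKey: null)
    .AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: "<your-api-key>");

var kernel = services.BuildServiceProvider().GetRequiredService<Kernel>();

// The router picks a service id based on the user's input...
var serviceId = new CustomRouter().FindService("Ollama, what is Jupiter?", ["ollama", "openai"]);

// ...and that choice is handed to the kernel through the prompt execution settings.
KernelArguments arguments = new(new PromptExecutionSettings { ServiceId = serviceId });
Console.WriteLine(await kernel.InvokePromptAsync("Ollama, what is Jupiter?", arguments));
```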

Resolves microsoft#7616

- microsoft#7616

---------

Co-authored-by: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
RogerBarreto and dmytrostruk committed Aug 6, 2024
1 parent a41bb91 commit 7b8ac8a
Showing 6 changed files with 201 additions and 1 deletion.
11 changes: 10 additions & 1 deletion dotnet/SK-dotnet.sln
@@ -318,7 +318,9 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Redis.UnitTests"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Qdrant.UnitTests", "src\Connectors\Connectors.Qdrant.UnitTests\Connectors.Qdrant.UnitTests.csproj", "{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StepwisePlannerMigration", "samples\Demos\StepwisePlannerMigration\StepwisePlannerMigration.csproj", "{38374C62-0263-4FE8-A18C-70FC8132912B}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StepwisePlannerMigration", "samples\Demos\StepwisePlannerMigration\StepwisePlannerMigration.csproj", "{38374C62-0263-4FE8-A18C-70FC8132912B}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AIModelRouter", "samples\Demos\AIModelRouter\AIModelRouter.csproj", "{E06818E3-00A5-41AC-97ED-9491070CDEA1}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -795,6 +797,12 @@ Global
{38374C62-0263-4FE8-A18C-70FC8132912B}.Publish|Any CPU.Build.0 = Debug|Any CPU
{38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.Build.0 = Release|Any CPU
{E06818E3-00A5-41AC-97ED-9491070CDEA1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{E06818E3-00A5-41AC-97ED-9491070CDEA1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E06818E3-00A5-41AC-97ED-9491070CDEA1}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
{E06818E3-00A5-41AC-97ED-9491070CDEA1}.Publish|Any CPU.Build.0 = Debug|Any CPU
{E06818E3-00A5-41AC-97ED-9491070CDEA1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E06818E3-00A5-41AC-97ED-9491070CDEA1}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -904,6 +912,7 @@ Global
{1D4667B9-9381-4E32-895F-123B94253EE8} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
{38374C62-0263-4FE8-A18C-70FC8132912B} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
{E06818E3-00A5-41AC-97ED-9491070CDEA1} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83}
20 changes: 20 additions & 0 deletions dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFrameworks>net8.0;netstandard2.0</TargetFrameworks>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UserSecretsId>5ee045b0-aea3-4f08-8d31-32d1a6f8fed0</UserSecretsId>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration" />
<PackageReference Include="Microsoft.Extensions.Configuration.UserSecrets" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\..\..\src\Connectors\Connectors.OpenAI\Connectors.OpenAI.csproj" />
</ItemGroup>

</Project>
38 changes: 38 additions & 0 deletions dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
@@ -0,0 +1,38 @@
// Copyright (c) Microsoft. All rights reserved.

#pragma warning disable SKEXP0001
#pragma warning disable SKEXP0010
#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'

namespace AIModelRouter;

/// <summary>
/// This class is for demonstration purposes only.
/// In a real-world scenario, you would use a more sophisticated routing mechanism, such as another local model for
/// deciding which service to use based on the user's input or any other criteria.
/// </summary>
public class CustomRouter()
{
/// <summary>
/// Returns the best service id to use based on the user's input.
/// This demonstration uses simple logic: the input is checked for specific keywords as the deciding factor;
/// if no keyword is found, it defaults to the first service in the list.
/// </summary>
/// <param name="lookupPrompt">User's input prompt</param>
/// <param name="serviceIds">List of service ids to choose from in order of importance, defaulting to the first</param>
/// <returns>Service id.</returns>
public string FindService(string lookupPrompt, IReadOnlyList<string> serviceIds)
{
// The order matters; if no keyword is found, the first service in the list is used.
foreach (var serviceId in serviceIds)
{
if (Contains(lookupPrompt, serviceId)) { return serviceId; }
}

return serviceIds[0];
}

// Ensure compatibility with both netstandard2.0 and net8.0 by using IndexOf instead of Contains
private static bool Contains(string prompt, string pattern)
=> prompt.IndexOf(pattern, StringComparison.CurrentCultureIgnoreCase) >= 0;
}
56 changes: 56 additions & 0 deletions dotnet/samples/Demos/AIModelRouter/Program.cs
@@ -0,0 +1,56 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;

#pragma warning disable SKEXP0001
#pragma warning disable SKEXP0010
#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'

namespace AIModelRouter;

internal sealed partial class Program
{
private static async Task Main(string[] args)
{
Console.ForegroundColor = ConsoleColor.White;

var config = new ConfigurationBuilder().AddUserSecrets<Program>().Build();

ServiceCollection services = new();

// Adding multiple connectors targeting different providers / models.
services.AddKernel() /* The LMStudio model is selected on the server side. */
.AddOpenAIChatCompletion(serviceId: "lmstudio", modelId: "N/A", endpoint: new Uri("http://localhost:1234"), apiKey: null)
.AddOpenAIChatCompletion(serviceId: "ollama", modelId: "phi3", endpoint: new Uri("http://localhost:11434"), apiKey: null)
.AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: config["OpenAI:ApiKey"]!)

// Adding a custom filter to capture the router-selected service id
.Services.AddSingleton<IPromptRenderFilter>(new SelectedServiceFilter());

var kernel = services.BuildServiceProvider().GetRequiredService<Kernel>();
var router = new CustomRouter();

while (true)
{
Console.Write("\n\nUser > ");
var userMessage = Console.ReadLine();

// Exit application if the user enters an empty message
if (string.IsNullOrWhiteSpace(userMessage)) { return; }

// Find the best service to use based on the user's input
KernelArguments arguments = new(new PromptExecutionSettings()
{
ServiceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai"])
});

// Invoke the prompt and print the response
await foreach (var chatChunk in kernel.InvokePromptStreamingAsync(userMessage, arguments).ConfigureAwait(false))
{
Console.Write(chatChunk);
}
}
}
}
51 changes: 51 additions & 0 deletions dotnet/samples/Demos/AIModelRouter/README.md
@@ -0,0 +1,51 @@
# AI Model Router

This sample demonstrates how to implement an AI Model Router using Semantic Kernel connectors to direct requests to various AI models based on user input. As part of this example, we integrate LMStudio, Ollama, and OpenAI, using the OpenAI Connector for LMStudio and Ollama since both expose OpenAI-compatible APIs.

> [!IMPORTANT]
> You can modify this sample to use any other combination of connectors or any OpenAI-compatible API model provider, as sketched below.
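
For instance, any OpenAI-compatible endpoint can be wired in by registering it under its own service id and adding that id to the list handed to the router. A hypothetical extra provider (the service id, endpoint, and model name below are placeholders) would look roughly like this:

```csharp
// Hypothetical extra provider: any OpenAI-compatible server can be registered under its own service id.
services.AddKernel()
    .AddOpenAIChatCompletion(serviceId: "myprovider", modelId: "my-model", endpoint: new Uri("http://localhost:5000"), apiKey: null);

// Include the new id in the list passed to the router (the first entry is the fallback default).
KernelArguments arguments = new(new PromptExecutionSettings()
{
    ServiceId = router.FindService(userMessage, ["myprovider", "lmstudio", "ollama", "openai"])
});
```
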
## Semantic Kernel Features Used

- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs) to generate responses from the LLM.
- [Filters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - used to capture the selected service and log it to the console (see the registration sketch below).
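
The filter itself (`SelectedServiceFilter.cs`) is a small `IPromptRenderFilter` that prints the selected service id before each prompt is rendered; registering it is a one-liner on the service collection, roughly as follows:

```csharp
// Register the filter so the router-selected service id is logged before each prompt is rendered
// (mirrors the registration in Program.cs).
services.AddSingleton<IPromptRenderFilter>(new SelectedServiceFilter());
```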

## Prerequisites

- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0).

## Configuring the sample

The sample can be configured from the command line with the .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) to avoid the risk of leaking secrets into the repository, branches, and pull requests.

### Using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets)

```powershell
# OpenAI API key (required by the 'openai' service)
dotnet user-secrets set "OpenAI:ApiKey" "... your api key ... "
```

## Running the sample

After configuring the sample, just hit `F5` to build and run the console application.

To build and run the console application from the terminal use the following commands:

```powershell
dotnet build
dotnet run
```

### Example of a conversation

> **User** > OpenAI, what is Jupiter? Keep it simple.
>
> **Assistant** > Sure! Jupiter is the largest planet in our solar system. It's a gas giant, mostly made of hydrogen and helium, and it has a lot of storms, including the famous Great Red Spot. Jupiter also has at least 79 moons.
>
> **User** > Ollama, what is Jupiter? Keep it simple.
>
> **Assistant** > Jupiter is a giant planet in our solar system known for being the largest and most massive, famous for its spectacled clouds and dozens of moons including Ganymede which is bigger than Earth!
>
> **User** > LMStudio, what is Jupiter? Keep it simple.
>
> **Assistant** > Jupiter is the fifth planet from the Sun in our Solar System and one of its gas giants alongside Saturn, Uranus, and Neptune. It's famous for having a massive storm called the Great Red Spot that has been raging for hundreds of years.
26 changes: 26 additions & 0 deletions dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs
@@ -0,0 +1,26 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;

#pragma warning disable SKEXP0001
#pragma warning disable SKEXP0010
#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'

namespace AIModelRouter;

/// <summary>
/// Using a filter to log the service being used for the prompt.
/// </summary>
public class SelectedServiceFilter : IPromptRenderFilter
{
/// <inheritdoc/>
public Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
{
Console.ForegroundColor = ConsoleColor.Yellow;
Console.WriteLine($"Selected service id: '{context.Arguments.ExecutionSettings?.FirstOrDefault().Key}'");

Console.ForegroundColor = ConsoleColor.White;
Console.Write("Assistant > ");
return next(context);
}
}
