diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index 0b996a99c192..7a07062811db 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -5,10 +5,10 @@
true
-
-
+
+
-
+
diff --git a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
index d34cadaf3707..70985d0fc27b 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
@@ -62,9 +62,9 @@ await agent.CreateThreadAsync(
finally
{
await agent.DeleteThreadAsync(threadId);
- await agent.DeleteAsync();
+ await agent.DeleteAsync(CancellationToken.None);
await vectorStoreClient.DeleteVectorStoreAsync(vectorStore);
- await fileClient.DeleteFileAsync(fileInfo);
+ await fileClient.DeleteFileAsync(fileInfo.Id);
}
// Local function to invoke agent and display the conversation messages.
diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
index 97a439729ff3..c4acca58770f 100644
--- a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
+++ b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
@@ -46,10 +46,17 @@ public static FunctionToolDefinition ToToolDefinition(this KernelFunction functi
required,
};
- return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description, BinaryData.FromObjectAsJson(spec));
+ return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
+ {
+ Description = function.Description,
+ Parameters = BinaryData.FromObjectAsJson(spec)
+ };
}
- return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName), function.Description);
+ return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
+ {
+ Description = function.Description
+ };
}
private static string ConvertType(Type? type)
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
index d66f54917d3f..06c49f7a1905 100644
--- a/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
@@ -1,4 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.ClientModel;
using System.Collections.Generic;
using System.Linq;
using System.Net;
@@ -52,7 +53,9 @@ public static async Task CreateThreadAsync(AssistantClient client, OpenA
{
foreach (ChatMessageContent message in options.Messages)
{
- ThreadInitializationMessage threadMessage = new(AssistantMessageFactory.GetMessageContents(message));
+ ThreadInitializationMessage threadMessage = new(
+ role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
+ content: AssistantMessageFactory.GetMessageContents(message));
createOptions.InitialMessages.Add(threadMessage);
}
}
@@ -89,6 +92,7 @@ public static async Task CreateMessageAsync(AssistantClient client, string threa
await client.CreateMessageAsync(
threadId,
+ message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
AssistantMessageFactory.GetMessageContents(message),
options,
cancellationToken).ConfigureAwait(false);
@@ -105,28 +109,31 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
{
Dictionary<string, string> agentNames = []; // Cache agent names by their identifier
- await foreach (ThreadMessage message in client.GetMessagesAsync(threadId, ListOrder.NewestFirst, cancellationToken).ConfigureAwait(false))
+ await foreach (PageResult<ThreadMessage> page in client.GetMessagesAsync(threadId, new() { Order = ListOrder.NewestFirst }, cancellationToken).ConfigureAwait(false))
{
- AuthorRole role = new(message.Role.ToString());
-
- string? assistantName = null;
- if (!string.IsNullOrWhiteSpace(message.AssistantId) &&
- !agentNames.TryGetValue(message.AssistantId, out assistantName))
+ foreach (var message in page.Values)
{
- Assistant assistant = await client.GetAssistantAsync(message.AssistantId).ConfigureAwait(false); // SDK BUG - CANCEL TOKEN (https://github.com/microsoft/semantic-kernel/issues/7431)
- if (!string.IsNullOrWhiteSpace(assistant.Name))
+ AuthorRole role = new(message.Role.ToString());
+
+ string? assistantName = null;
+ if (!string.IsNullOrWhiteSpace(message.AssistantId) &&
+ !agentNames.TryGetValue(message.AssistantId, out assistantName))
{
- agentNames.Add(assistant.Id, assistant.Name);
+ Assistant assistant = await client.GetAssistantAsync(message.AssistantId, cancellationToken).ConfigureAwait(false);
+ if (!string.IsNullOrWhiteSpace(assistant.Name))
+ {
+ agentNames.Add(assistant.Id, assistant.Name);
+ }
}
- }
- assistantName ??= message.AssistantId;
+ assistantName ??= message.AssistantId;
- ChatMessageContent content = GenerateMessageContent(assistantName, message);
+ ChatMessageContent content = GenerateMessageContent(assistantName, message);
- if (content.Items.Count > 0)
- {
- yield return content;
+ if (content.Items.Count > 0)
+ {
+ yield return content;
+ }
}
}
}
@@ -190,7 +197,11 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}");
}
- RunStep[] steps = await client.GetRunStepsAsync(run).ToArrayAsync(cancellationToken).ConfigureAwait(false);
+ List<RunStep> steps = [];
+ await foreach (var page in client.GetRunStepsAsync(run).ConfigureAwait(false))
+ {
+ steps.AddRange(page.Values);
+ };
// Is tool action required?
if (run.Status == RunStatus.RequiresAction)
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
index f5c4a3588cf8..28c8dba9e3a8 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -108,9 +108,12 @@ public static async IAsyncEnumerable ListDefinitionsA
AssistantClient client = CreateClient(provider);
// Query and enumerate assistant definitions
- await foreach (Assistant model in client.GetAssistantsAsync(ListOrder.NewestFirst, cancellationToken).ConfigureAwait(false))
+ await foreach (var page in client.GetAssistantsAsync(new AssistantCollectionOptions() { Order = ListOrder.NewestFirst }, cancellationToken).ConfigureAwait(false))
{
- yield return CreateAssistantDefinition(model);
+ foreach (Assistant model in page.Values)
+ {
+ yield return CreateAssistantDefinition(model);
+ }
}
}
@@ -132,7 +135,7 @@ public static async Task RetrieveAsync(
AssistantClient client = CreateClient(provider);
// Retrieve the assistant
- Assistant model = await client.GetAssistantAsync(id).ConfigureAwait(false); // SDK BUG - CANCEL TOKEN (https://github.com/microsoft/semantic-kernel/issues/7431)
+ Assistant model = await client.GetAssistantAsync(id, cancellationToken).ConfigureAwait(false);
// Instantiate the agent
return
diff --git a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
index 3e2e395a77ea..0b60b66fa84a 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
@@ -50,7 +50,7 @@ public static OpenAIClientProvider ForAzureOpenAI(ApiKeyCredential apiKey, Uri e
Verify.NotNull(apiKey, nameof(apiKey));
Verify.NotNull(endpoint, nameof(endpoint));
- AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(endpoint, httpClient);
+ AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient);
return new(new AzureOpenAIClient(endpoint, apiKey!, clientOptions), CreateConfigurationKeys(endpoint, httpClient));
}
@@ -66,7 +66,7 @@ public static OpenAIClientProvider ForAzureOpenAI(TokenCredential credential, Ur
Verify.NotNull(credential, nameof(credential));
Verify.NotNull(endpoint, nameof(endpoint));
- AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(endpoint, httpClient);
+ AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient);
return new(new AzureOpenAIClient(endpoint, credential, clientOptions), CreateConfigurationKeys(endpoint, httpClient));
}
@@ -102,12 +102,11 @@ public static OpenAIClientProvider FromClient(OpenAIClient client)
return new(client, [client.GetType().FullName!, client.GetHashCode().ToString()]);
}
- private static AzureOpenAIClientOptions CreateAzureClientOptions(Uri? endpoint, HttpClient? httpClient)
+ private static AzureOpenAIClientOptions CreateAzureClientOptions(HttpClient? httpClient)
{
AzureOpenAIClientOptions options = new()
{
- ApplicationId = HttpHeaderConstant.Values.UserAgent,
- Endpoint = endpoint,
+ ApplicationId = HttpHeaderConstant.Values.UserAgent
};
ConfigureClientOptions(httpClient, options);
@@ -128,7 +127,7 @@ private static OpenAIClientOptions CreateOpenAIClientOptions(Uri? endpoint, Http
return options;
}
- private static void ConfigureClientOptions(HttpClient? httpClient, OpenAIClientOptions options)
+ private static void ConfigureClientOptions(HttpClient? httpClient, ClientPipelineOptions options)
{
options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIAssistantAgent))), PipelinePosition.PerCall);
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
index 8a8fc8dfedca..f0c7cdf4250d 100644
--- a/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI/Core/AzureClientCore.ChatCompletion.cs
@@ -1,13 +1,11 @@
// Copyright (c) Microsoft. All rights reserved.
-using System.Collections.Generic;
using System.Diagnostics;
using Azure.AI.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Diagnostics;
using OpenAI.Chat;
-using OpenAIChatCompletion = OpenAI.Chat.ChatCompletion;
#pragma warning disable CA2208 // Instantiate argument exceptions correctly
@@ -18,34 +16,9 @@ namespace Microsoft.SemanticKernel.Connectors.AzureOpenAI;
///
internal partial class AzureClientCore
{
- private const string ContentFilterResultForPromptKey = "ContentFilterResultForPrompt";
- private const string ContentFilterResultForResponseKey = "ContentFilterResultForResponse";
-
///
protected override OpenAIPromptExecutionSettings GetSpecializedExecutionSettings(PromptExecutionSettings? executionSettings)
- {
- return AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
- }
-
- ///
- protected override Dictionary<string, object?> GetChatCompletionMetadata(OpenAIChatCompletion completions)
- {
-#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
- return new Dictionary<string, object?>
- {
- { nameof(completions.Id), completions.Id },
- { nameof(completions.CreatedAt), completions.CreatedAt },
- { ContentFilterResultForPromptKey, completions.GetContentFilterResultForPrompt() },
- { nameof(completions.SystemFingerprint), completions.SystemFingerprint },
- { nameof(completions.Usage), completions.Usage },
- { ContentFilterResultForResponseKey, completions.GetContentFilterResultForResponse() },
-
- // Serialization of this struct behaves as an empty object {}, need to cast to string to avoid it.
- { nameof(completions.FinishReason), completions.FinishReason.ToString() },
- { nameof(completions.ContentTokenLogProbabilities), completions.ContentTokenLogProbabilities },
- };
-#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
- }
+ => AzureOpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings);
///
protected override Activity? StartCompletionActivity(ChatHistory chatHistory, PromptExecutionSettings settings)
@@ -71,7 +44,7 @@ protected override ChatCompletionOptions CreateChatCompletionOptions(
FrequencyPenalty = (float?)executionSettings.FrequencyPenalty,
PresencePenalty = (float?)executionSettings.PresencePenalty,
Seed = executionSettings.Seed,
- User = executionSettings.User,
+ EndUserId = executionSettings.User,
TopLogProbabilityCount = executionSettings.TopLogprobs,
IncludeLogProbabilities = executionSettings.Logprobs,
ResponseFormat = GetResponseFormat(azureSettings) ?? ChatResponseFormat.Text,
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs
index bf6caf1ee3f2..f41b204058ed 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/ClientCoreTests.cs
@@ -67,9 +67,9 @@ public void ItUsesEndpointAsExpected(string? clientBaseAddress, string? provided
var clientCore = new ClientCore("model", "apiKey", endpoint: endpoint, httpClient: client);
// Assert
- Assert.Equal(endpoint ?? client?.BaseAddress ?? new Uri("https://api.openai.com/v1"), clientCore.Endpoint);
+ Assert.Equal(endpoint ?? client?.BaseAddress ?? new Uri("https://api.openai.com/"), clientCore.Endpoint);
Assert.True(clientCore.Attributes.ContainsKey(AIServiceExtensions.EndpointKey));
- Assert.Equal(endpoint?.ToString() ?? client?.BaseAddress?.ToString() ?? "https://api.openai.com/v1", clientCore.Attributes[AIServiceExtensions.EndpointKey]);
+ Assert.Equal(endpoint?.ToString() ?? client?.BaseAddress?.ToString() ?? "https://api.openai.com/", clientCore.Attributes[AIServiceExtensions.EndpointKey]);
client?.Dispose();
}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
index ccda12afe6a6..326b14bc7368 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Services/OpenAIChatCompletionServiceTests.cs
@@ -76,13 +76,10 @@ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory)
}
[Theory]
- [InlineData("http://localhost:1234/chat/completions", "http://localhost:1234/chat/completions")] // Uses full path when provided
- [InlineData("http://localhost:1234/v2/chat/completions", "http://localhost:1234/v2/chat/completions")] // Uses full path when provided
- [InlineData("http://localhost:1234", "http://localhost:1234/v1/chat/completions")]
+ [InlineData("http://localhost:1234/v1/chat/completions", "http://localhost:1234/v1/chat/completions")] // Uses full path when provided
+ [InlineData("http://localhost:1234/", "http://localhost:1234/v1/chat/completions")]
[InlineData("http://localhost:8080", "http://localhost:8080/v1/chat/completions")]
[InlineData("https://something:8080", "https://something:8080/v1/chat/completions")] // Accepts TLS Secured endpoints
- [InlineData("http://localhost:1234/v2", "http://localhost:1234/v2/chat/completions")]
- [InlineData("http://localhost:8080/v2", "http://localhost:8080/v2/chat/completions")]
public async Task ItUsesCustomEndpointsWhenProvidedDirectlyAsync(string endpointProvided, string expectedEndpoint)
{
// Arrange
@@ -98,13 +95,10 @@ public async Task ItUsesCustomEndpointsWhenProvidedDirectlyAsync(string endpoint
}
[Theory]
- [InlineData("http://localhost:1234/chat/completions", "http://localhost:1234/chat/completions")] // Uses full path when provided
- [InlineData("http://localhost:1234/v2/chat/completions", "http://localhost:1234/v2/chat/completions")] // Uses full path when provided
- [InlineData("http://localhost:1234", "http://localhost:1234/v1/chat/completions")]
+ [InlineData("http://localhost:1234/v1/chat/completions", "http://localhost:1234/v1/chat/completions")] // Uses full path when provided
+ [InlineData("http://localhost:1234/", "http://localhost:1234/v1/chat/completions")]
[InlineData("http://localhost:8080", "http://localhost:8080/v1/chat/completions")]
[InlineData("https://something:8080", "https://something:8080/v1/chat/completions")] // Accepts TLS Secured endpoints
- [InlineData("http://localhost:1234/v2", "http://localhost:1234/v2/chat/completions")]
- [InlineData("http://localhost:8080/v2", "http://localhost:8080/v2/chat/completions")]
public async Task ItUsesCustomEndpointsWhenProvidedAsBaseAddressAsync(string endpointProvided, string expectedEndpoint)
{
// Arrange
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
index 1177fb7ec846..6546bd291235 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.ChatCompletion.cs
@@ -364,7 +364,7 @@ internal async IAsyncEnumerable GetStreamingC
using (var activity = this.StartCompletionActivity(chat, chatExecutionSettings))
{
// Make the request.
- AsyncResultCollection<StreamingChatCompletionUpdate> response;
+ AsyncCollectionResult<StreamingChatCompletionUpdate> response;
try
{
response = RunRequest(() => this.Client!.GetChatClient(targetModel).CompleteChatStreamingAsync(chatForRequest, chatOptions, cancellationToken));
@@ -644,7 +644,7 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
FrequencyPenalty = (float?)executionSettings.FrequencyPenalty,
PresencePenalty = (float?)executionSettings.PresencePenalty,
Seed = executionSettings.Seed,
- User = executionSettings.User,
+ EndUserId = executionSettings.User,
TopLogProbabilityCount = executionSettings.TopLogprobs,
IncludeLogProbabilities = executionSettings.Logprobs,
ResponseFormat = GetResponseFormat(executionSettings) ?? ChatResponseFormat.Text,
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs
index c0fd15380dfb..1a34fe7a0230 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.TextToAudio.cs
@@ -40,7 +40,7 @@ internal async Task> GetAudioContentsAsync(
Speed = audioExecutionSettings.Speed,
};
- ClientResult<BinaryData> response = await RunRequestAsync(() => this.Client!.GetAudioClient(targetModel).GenerateSpeechFromTextAsync(prompt, GetGeneratedSpeechVoice(audioExecutionSettings?.Voice), options, cancellationToken)).ConfigureAwait(false);
+ ClientResult<BinaryData> response = await RunRequestAsync(() => this.Client!.GetAudioClient(targetModel).GenerateSpeechAsync(prompt, GetGeneratedSpeechVoice(audioExecutionSettings?.Voice), options, cancellationToken)).ConfigureAwait(false);
return [new AudioContent(response.Value.ToArray(), mimeType)];
}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs
index 843768bc17c2..271aa2321ea2 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Core/ClientCore.cs
@@ -36,7 +36,7 @@ internal partial class ClientCore
///
/// Default OpenAI API endpoint.
///
- private const string OpenAIV1Endpoint = "https://api.openai.com/v1";
+ private const string OpenAIEndpoint = "https://api.openai.com/";
///
/// Identifier of the default model to use
@@ -104,7 +104,7 @@ internal ClientCore(
if (this.Endpoint is null)
{
Verify.NotNullOrWhiteSpace(apiKey); // For Public OpenAI Endpoint a key must be provided.
- this.Endpoint = new Uri(OpenAIV1Endpoint);
+ this.Endpoint = new Uri(OpenAIEndpoint);
}
else if (string.IsNullOrEmpty(apiKey))
{
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs
index 08de7612b078..f544d8a5c61c 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Services/OpenAIChatCompletionService.cs
@@ -71,25 +71,17 @@ public OpenAIChatCompletionService(
var providedEndpoint = endpoint ?? httpClient?.BaseAddress;
if (providedEndpoint is not null)
{
- // If the provided endpoint does not provide a path, we add a version to the base path for compatibility
- if (providedEndpoint.PathAndQuery.Length == 0 || providedEndpoint.PathAndQuery == "/")
+ // As OpenAI Client automatically adds the chat completions endpoint, we remove it to avoid duplication.
+ const string PathAndQueryPattern = "v1/chat/completions";
+ var providedEndpointText = providedEndpoint.ToString();
+ int index = providedEndpointText.IndexOf(PathAndQueryPattern, StringComparison.OrdinalIgnoreCase);
+ if (index >= 0)
{
- internalClientEndpoint = new Uri(providedEndpoint, "/v1/");
+ internalClientEndpoint = new Uri($"{providedEndpointText.Substring(0, index)}{providedEndpointText.Substring(index + PathAndQueryPattern.Length)}");
}
else
{
- // As OpenAI Client automatically adds the chatcompletions endpoint, we remove it to avoid duplication.
- const string PathAndQueryPattern = "/chat/completions";
- var providedEndpointText = providedEndpoint.ToString();
- int index = providedEndpointText.IndexOf(PathAndQueryPattern, StringComparison.OrdinalIgnoreCase);
- if (index >= 0)
- {
- internalClientEndpoint = new Uri($"{providedEndpointText.Substring(0, index)}{providedEndpointText.Substring(index + PathAndQueryPattern.Length)}");
- }
- else
- {
- internalClientEndpoint = providedEndpoint;
- }
+ internalClientEndpoint = providedEndpoint;
}
}