Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,14 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;
using Microsoft.Shared.Diagnostics;

#pragma warning disable S103 // Lines should not be too long

namespace OpenAI.Chat;

/// <summary>Provides extension methods for working with content associated with OpenAI.Chat.</summary>
Expand All @@ -27,10 +28,10 @@ public static ChatTool AsOpenAIChatTool(this AIFunction function) =>

/// <summary>Creates a sequence of OpenAI <see cref="ChatMessage"/> instances from the specified input messages.</summary>
/// <param name="messages">The input messages to convert.</param>
/// <param name="options">The options employed while processing <paramref name="messages"/>.</param>
/// <returns>A sequence of OpenAI chat messages.</returns>
/// <exception cref="ArgumentNullException"><paramref name="messages"/> is <see langword="null"/>.</exception>
public static IEnumerable<ChatMessage> AsOpenAIChatMessages(this IEnumerable<Microsoft.Extensions.AI.ChatMessage> messages) =>
OpenAIChatClient.ToOpenAIChatMessages(Throw.IfNull(messages), chatOptions: null);
public static IEnumerable<ChatMessage> AsOpenAIChatMessages(this IEnumerable<Microsoft.Extensions.AI.ChatMessage> messages, ChatOptions? options = null) =>
OpenAIChatClient.ToOpenAIChatMessages(Throw.IfNull(messages), options);

/// <summary>Creates an OpenAI <see cref="ChatCompletion"/> from a <see cref="ChatResponse"/>.</summary>
/// <param name="response">The <see cref="ChatResponse"/> to convert to a <see cref="ChatCompletion"/>.</param>
Expand All @@ -47,24 +48,9 @@ public static ChatCompletion AsOpenAIChatCompletion(this ChatResponse response)

var lastMessage = response.Messages.LastOrDefault();

ChatMessageRole role = lastMessage?.Role.Value switch
{
"user" => ChatMessageRole.User,
"function" => ChatMessageRole.Function,
"tool" => ChatMessageRole.Tool,
"developer" => ChatMessageRole.Developer,
"system" => ChatMessageRole.System,
_ => ChatMessageRole.Assistant,
};
ChatMessageRole role = ToChatMessageRole(lastMessage?.Role);

ChatFinishReason finishReason = response.FinishReason?.Value switch
{
"length" => ChatFinishReason.Length,
"content_filter" => ChatFinishReason.ContentFilter,
"tool_calls" => ChatFinishReason.ToolCalls,
"function_call" => ChatFinishReason.FunctionCall,
_ => ChatFinishReason.Stop,
};
ChatFinishReason finishReason = ToChatFinishReason(response.FinishReason);

ChatTokenUsage usage = OpenAIChatModelFactory.ChatTokenUsage(
(int?)response.Usage?.OutputTokenCount ?? 0,
Expand Down Expand Up @@ -124,6 +110,52 @@ static IEnumerable<ChatMessageAnnotation> ConvertAnnotations(IEnumerable<AIConte
}
}

/// <summary>
/// Converts a sequence of <see cref="ChatResponseUpdate"/> instances into the corresponding sequence of
/// OpenAI <see cref="StreamingChatCompletionUpdate"/> instances.
/// </summary>
/// <param name="responseUpdates">The update instances.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>A sequence of converted <see cref="ChatResponseUpdate"/> instances.</returns>
/// <exception cref="ArgumentNullException"><paramref name="responseUpdates"/> is <see langword="null"/>.</exception>
public static async IAsyncEnumerable<StreamingChatCompletionUpdate> AsOpenAIStreamingChatCompletionUpdatesAsync(
    this IAsyncEnumerable<ChatResponseUpdate> responseUpdates, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
    _ = Throw.IfNull(responseUpdates);

    await foreach (var responseUpdate in responseUpdates.WithCancellation(cancellationToken).ConfigureAwait(false))
    {
        // If this update already wraps an original OpenAI streaming update, hand that back untouched.
        if (responseUpdate.RawRepresentation is StreamingChatCompletionUpdate rawUpdate)
        {
            yield return rawUpdate;
            continue;
        }

        // Map token usage onto the OpenAI shape when this update carries any usage details.
        ChatTokenUsage? usage = null;
        if (responseUpdate.Contents.OfType<UsageContent>().FirstOrDefault() is { } usageContent)
        {
            usage = OpenAIChatModelFactory.ChatTokenUsage(
                (int?)usageContent.Details.OutputTokenCount ?? 0,
                (int?)usageContent.Details.InputTokenCount ?? 0,
                (int?)usageContent.Details.TotalTokenCount ?? 0);
        }

        // Translate each function call request into a streaming tool call update, indexed by its position.
        List<StreamingChatToolCallUpdate> toolCallUpdates = new();
        foreach (var functionCall in responseUpdate.Contents.OfType<FunctionCallContent>())
        {
            toolCallUpdates.Add(OpenAIChatModelFactory.StreamingChatToolCallUpdate(
                toolCallUpdates.Count, functionCall.CallId, ChatToolCallKind.Function, functionCall.Name,
                new(JsonSerializer.SerializeToUtf8Bytes(functionCall.Arguments, AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(IDictionary<string, object?>))))));
        }

        yield return OpenAIChatModelFactory.StreamingChatCompletionUpdate(
            responseUpdate.ResponseId,
            new(OpenAIChatClient.ToOpenAIChatContent(responseUpdate.Contents)),
            toolCallUpdates: toolCallUpdates,
            role: ToChatMessageRole(responseUpdate.Role),
            finishReason: ToChatFinishReason(responseUpdate.FinishReason),
            createdAt: responseUpdate.CreatedAt ?? default,
            model: responseUpdate.ModelId,
            usage: usage);
    }
}

/// <summary>Creates a sequence of <see cref="Microsoft.Extensions.AI.ChatMessage"/> instances from the specified input messages.</summary>
/// <param name="messages">The input messages to convert.</param>
/// <returns>A sequence of Microsoft.Extensions.AI chat messages.</returns>
Expand Down Expand Up @@ -205,4 +237,40 @@ static object ToToolResult(ChatMessageContent content)
/// <exception cref="ArgumentNullException"><paramref name="chatCompletion"/> is <see langword="null"/>.</exception>
public static ChatResponse AsChatResponse(this ChatCompletion chatCompletion, ChatCompletionOptions? options = null) =>
OpenAIChatClient.FromOpenAIChatCompletion(Throw.IfNull(chatCompletion), options);

/// <summary>
/// Converts a sequence of OpenAI <see cref="StreamingChatCompletionUpdate"/> instances into the corresponding
/// sequence of Microsoft.Extensions.AI <see cref="ChatResponseUpdate"/> instances.
/// </summary>
/// <param name="chatCompletionUpdates">The update instances.</param>
/// <param name="options">The options employed in the creation of the response.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>A sequence of converted <see cref="ChatResponseUpdate"/> instances.</returns>
/// <exception cref="ArgumentNullException"><paramref name="chatCompletionUpdates"/> is <see langword="null"/>.</exception>
public static IAsyncEnumerable<ChatResponseUpdate> AsChatResponseUpdatesAsync(
    this IAsyncEnumerable<StreamingChatCompletionUpdate> chatCompletionUpdates, ChatCompletionOptions? options = null, CancellationToken cancellationToken = default)
{
    _ = Throw.IfNull(chatCompletionUpdates);

    // Delegate to the shared conversion pipeline used by the OpenAI chat client.
    return OpenAIChatClient.FromOpenAIStreamingChatCompletionAsync(chatCompletionUpdates, options, cancellationToken);
}

/// <summary>Converts the <see cref="ChatRole"/> to a <see cref="ChatMessageRole"/>.</summary>
private static ChatMessageRole ToChatMessageRole(ChatRole? role)
{
    // A null role or any unrecognized value maps to Assistant, mirroring the non-streaming conversion path.
    switch (role?.Value)
    {
        case "user":
            return ChatMessageRole.User;
        case "function":
            return ChatMessageRole.Function;
        case "tool":
            return ChatMessageRole.Tool;
        case "developer":
            return ChatMessageRole.Developer;
        case "system":
            return ChatMessageRole.System;
        default:
            return ChatMessageRole.Assistant;
    }
}

/// <summary>Converts the <see cref="Microsoft.Extensions.AI.ChatFinishReason"/> to a <see cref="ChatFinishReason"/>.</summary>
private static ChatFinishReason ToChatFinishReason(Microsoft.Extensions.AI.ChatFinishReason? finishReason)
{
    // A null reason or any unrecognized value maps to Stop, mirroring the non-streaming conversion path.
    switch (finishReason?.Value)
    {
        case "length":
            return ChatFinishReason.Length;
        case "content_filter":
            return ChatFinishReason.ContentFilter;
        case "tool_calls":
            return ChatFinishReason.ToolCalls;
        case "function_call":
            return ChatFinishReason.FunctionCall;
        default:
            return ChatFinishReason.Stop;
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

using System;
using System.Collections.Generic;
using System.Threading;
using Microsoft.Extensions.AI;
using Microsoft.Shared.Diagnostics;

Expand All @@ -20,10 +21,11 @@ public static ResponseTool AsOpenAIResponseTool(this AIFunction function) =>

/// <summary>Creates a sequence of OpenAI <see cref="ResponseItem"/> instances from the specified input messages.</summary>
/// <param name="messages">The input messages to convert.</param>
/// <param name="options">The options employed while processing <paramref name="messages"/>.</param>
/// <returns>A sequence of OpenAI response items.</returns>
/// <exception cref="ArgumentNullException"><paramref name="messages"/> is <see langword="null"/>.</exception>
public static IEnumerable<ResponseItem> AsOpenAIResponseItems(this IEnumerable<ChatMessage> messages) =>
OpenAIResponsesChatClient.ToOpenAIResponseItems(Throw.IfNull(messages));
public static IEnumerable<ResponseItem> AsOpenAIResponseItems(this IEnumerable<ChatMessage> messages, ChatOptions? options = null) =>
OpenAIResponsesChatClient.ToOpenAIResponseItems(Throw.IfNull(messages), options);

/// <summary>Creates a sequence of <see cref="ChatMessage"/> instances from the specified input items.</summary>
/// <param name="items">The input messages to convert.</param>
Expand All @@ -40,6 +42,19 @@ public static IEnumerable<ChatMessage> AsChatMessages(this IEnumerable<ResponseI
public static ChatResponse AsChatResponse(this OpenAIResponse response, ResponseCreationOptions? options = null) =>
OpenAIResponsesChatClient.FromOpenAIResponse(Throw.IfNull(response), options);

/// <summary>
/// Converts a sequence of OpenAI <see cref="StreamingResponseUpdate"/> instances into the corresponding
/// sequence of Microsoft.Extensions.AI <see cref="ChatResponseUpdate"/> instances.
/// </summary>
/// <param name="responseUpdates">The update instances.</param>
/// <param name="options">The options employed in the creation of the response.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>A sequence of converted <see cref="ChatResponseUpdate"/> instances.</returns>
/// <exception cref="ArgumentNullException"><paramref name="responseUpdates"/> is <see langword="null"/>.</exception>
public static IAsyncEnumerable<ChatResponseUpdate> AsChatResponseUpdatesAsync(
    this IAsyncEnumerable<StreamingResponseUpdate> responseUpdates, ResponseCreationOptions? options = null, CancellationToken cancellationToken = default)
{
    _ = Throw.IfNull(responseUpdates);

    // Delegate to the shared conversion pipeline used by the OpenAI responses chat client.
    return OpenAIResponsesChatClient.FromOpenAIStreamingResponseUpdatesAsync(responseUpdates, options, cancellationToken);
}

/// <summary>Creates an OpenAI <see cref="OpenAIResponse"/> from a <see cref="ChatResponse"/>.</summary>
/// <param name="response">The response to convert.</param>
/// <returns>The created <see cref="OpenAIResponse"/>.</returns>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -303,7 +303,7 @@ internal static List<ChatMessageContentPart> ToOpenAIChatContent(IEnumerable<AIC
return null;
}

private static async IAsyncEnumerable<ChatResponseUpdate> FromOpenAIStreamingChatCompletionAsync(
internal static async IAsyncEnumerable<ChatResponseUpdate> FromOpenAIStreamingChatCompletionAsync(
IAsyncEnumerable<StreamingChatCompletionUpdate> updates,
ChatCompletionOptions? options,
[EnumeratorCancellation] CancellationToken cancellationToken)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ public async Task<ChatResponse> GetResponseAsync(
_ = Throw.IfNull(messages);

// Convert the inputs into what OpenAIResponseClient expects.
var openAIResponseItems = ToOpenAIResponseItems(messages);
var openAIResponseItems = ToOpenAIResponseItems(messages, options);
var openAIOptions = ToOpenAIResponseCreationOptions(options);

// Make the call to the OpenAIResponseClient.
Expand Down Expand Up @@ -174,16 +174,22 @@ internal static IEnumerable<ChatMessage> ToChatMessages(IEnumerable<ResponseItem
}

/// <inheritdoc />
public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
IEnumerable<ChatMessage> messages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
public IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(messages);

// Convert the inputs into what OpenAIResponseClient expects.
var openAIResponseItems = ToOpenAIResponseItems(messages);
var openAIResponseItems = ToOpenAIResponseItems(messages, options);
var openAIOptions = ToOpenAIResponseCreationOptions(options);

// Make the call to the OpenAIResponseClient and process the streaming results.
var streamingUpdates = _responseClient.CreateResponseStreamingAsync(openAIResponseItems, openAIOptions, cancellationToken);

return FromOpenAIStreamingResponseUpdatesAsync(streamingUpdates, openAIOptions, cancellationToken);
}

internal static async IAsyncEnumerable<ChatResponseUpdate> FromOpenAIStreamingResponseUpdatesAsync(
IAsyncEnumerable<StreamingResponseUpdate> streamingResponseUpdates, ResponseCreationOptions? options, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
DateTimeOffset? createdAt = null;
string? responseId = null;
string? conversationId = null;
Expand All @@ -192,14 +198,15 @@ public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
ChatRole? lastRole = null;
Dictionary<int, MessageResponseItem> outputIndexToMessages = [];
Dictionary<int, FunctionCallInfo>? functionCallInfos = null;
await foreach (var streamingUpdate in _responseClient.CreateResponseStreamingAsync(openAIResponseItems, openAIOptions, cancellationToken).ConfigureAwait(false))

await foreach (var streamingUpdate in streamingResponseUpdates.WithCancellation(cancellationToken).ConfigureAwait(false))
{
switch (streamingUpdate)
{
case StreamingResponseCreatedUpdate createdUpdate:
createdAt = createdUpdate.Response.CreatedAt;
responseId = createdUpdate.Response.Id;
conversationId = openAIOptions.StoredOutputEnabled is false ? null : responseId;
conversationId = options?.StoredOutputEnabled is false ? null : responseId;
modelId = createdUpdate.Response.Model;
goto default;

Expand Down Expand Up @@ -485,8 +492,10 @@ private ResponseCreationOptions ToOpenAIResponseCreationOptions(ChatOptions? opt
}

/// <summary>Convert a sequence of <see cref="ChatMessage"/>s to <see cref="ResponseItem"/>s.</summary>
internal static IEnumerable<ResponseItem> ToOpenAIResponseItems(IEnumerable<ChatMessage> inputs)
internal static IEnumerable<ResponseItem> ToOpenAIResponseItems(IEnumerable<ChatMessage> inputs, ChatOptions? options)
{
_ = options; // currently unused

foreach (ChatMessage input in inputs)
{
if (input.Role == ChatRole.System ||
Expand Down
Loading
Loading