Refactored chat requests (#607)
Some checks are pending
Build and Release / Read metadata (push) Waiting to run
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-apple-darwin, osx-arm64, macos-latest, aarch64-apple-darwin, dmg updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-pc-windows-msvc.exe, win-arm64, windows-latest, aarch64-pc-windows-msvc, nsis updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-unknown-linux-gnu, linux-arm64, ubuntu-22.04-arm, aarch64-unknown-linux-gnu, appimage deb updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-apple-darwin, osx-x64, macos-latest, x86_64-apple-darwin, dmg updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-pc-windows-msvc.exe, win-x64, windows-latest, x86_64-pc-windows-msvc, nsis updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-unknown-linux-gnu, linux-x64, ubuntu-22.04, x86_64-unknown-linux-gnu, appimage deb updater) (push) Blocked by required conditions
Build and Release / Prepare & create release (push) Blocked by required conditions
Build and Release / Publish release (push) Blocked by required conditions

This commit is contained in:
Thorsten Sommer 2025-12-28 14:10:20 +01:00 committed by GitHub
parent ef3d58cbee
commit 4be5002088
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 137 additions and 79 deletions

View File

@ -1,4 +1,6 @@
namespace AIStudio.Chat;
using AIStudio.Provider;
namespace AIStudio.Chat;
public static class ListContentBlockExtensions
{
@ -7,9 +9,8 @@ public static class ListContentBlockExtensions
/// </summary>
/// <param name="blocks">The list of content blocks to process.</param>
/// <param name="transformer">A function that transforms each content block into a message result asynchronously.</param>
/// <typeparam name="TResult">The type of the result produced by the transformation function.</typeparam>
/// <returns>An asynchronous task that resolves to a list of transformed results.</returns>
public static async Task<IList<TResult>> BuildMessages<TResult>(this List<ContentBlock> blocks, Func<ContentBlock, Task<TResult>> transformer)
public static async Task<IList<IMessageBase>> BuildMessages(this List<ContentBlock> blocks, Func<ContentBlock, Task<IMessageBase>> transformer)
{
var messages = blocks
.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text))

View File

@ -30,7 +30,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -40,7 +40,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -1,5 +1,4 @@
using System.Text.Json.Serialization;
using AIStudio.Provider.OpenAI;
namespace AIStudio.Provider.Anthropic;
@ -13,7 +12,7 @@ namespace AIStudio.Provider.Anthropic;
/// <param name="System">The system prompt for the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
IList<Message> Messages,
IList<IMessageBase> Messages,
int MaxTokens,
bool Stream,
string System

View File

@ -31,7 +31,7 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
var apiParameters = this.ParseAdditionalApiParameters("system");
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -40,7 +40,7 @@ public abstract class BaseProvider : IProvider, ISecretId
protected static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
Converters = { new AnnotationConverter() },
Converters = { new AnnotationConverter(), new MessageBaseConverter() },
AllowTrailingCommas = false
};
@ -130,7 +130,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if (nextResponse.StatusCode is HttpStatusCode.Forbidden)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Block, string.Format(TB("Tried to communicate with the LLM provider '{0}'. You might not be able to use this provider from your location. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -138,7 +138,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.BadRequest)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The required message format might be changed. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -146,7 +146,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.NotFound)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. Something was not found. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -154,7 +154,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.Unauthorized)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Key, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The API key might be invalid. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -162,7 +162,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.InternalServerError)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The server might be down or having issues. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -170,7 +170,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.ServiceUnavailable)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The provider is overloaded. The message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}

View File

@ -30,7 +30,7 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -40,7 +40,7 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -10,7 +10,7 @@ namespace AIStudio.Provider.Fireworks;
/// <param name="Stream">Whether to stream the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
IList<Message> Messages,
IList<IMessageBase> Messages,
bool Stream
)
{

View File

@ -30,7 +30,7 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -40,7 +40,7 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -5,4 +5,9 @@ namespace AIStudio.Provider.Fireworks;
/// </summary>
/// <param name="Content">The text content of the message.</param>
/// <param name="Role">The role of the message.</param>
public readonly record struct Message(string Content, string Role);
public record TextMessage(string Content, string Role) : IMessage<string>
{
    /// <summary>
    /// Parameterless constructor; enables object-initializer syntax
    /// (e.g., new TextMessage { Role = ..., Content = ... }) as used by the providers.
    /// </summary>
    public TextMessage() : this(string.Empty, string.Empty)
    {
    }
}

View File

@ -30,7 +30,7 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -40,7 +40,7 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -1,5 +1,4 @@
using System.Text.Json.Serialization;
using AIStudio.Provider.OpenAI;
namespace AIStudio.Provider.Google;
@ -11,7 +10,7 @@ namespace AIStudio.Provider.Google;
/// <param name="Stream">Whether to stream the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
IList<Message> Messages,
IList<IMessageBase> Messages,
bool Stream
)
{

View File

@ -30,7 +30,7 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -40,7 +40,7 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -1,5 +1,4 @@
using System.Text.Json.Serialization;
using AIStudio.Provider.OpenAI;
namespace AIStudio.Provider.Groq;
@ -12,7 +11,7 @@ namespace AIStudio.Provider.Groq;
/// <param name="Seed">The seed for the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
IList<Message> Messages,
IList<IMessageBase> Messages,
bool Stream,
int Seed
)

View File

@ -30,7 +30,7 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -40,7 +40,7 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -30,7 +30,7 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -40,7 +40,7 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -35,7 +35,7 @@ public sealed class ProviderHuggingFace : BaseProvider
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -45,7 +45,7 @@ public sealed class ProviderHuggingFace : BaseProvider
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var message = await chatThread.Blocks.BuildMessages(async n => new Message
var message = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -0,0 +1,15 @@
namespace AIStudio.Provider;

/// <summary>
/// Standard interface for messages exchanged with AI models.
/// </summary>
/// <remarks>
/// A typical implementation is <c>TextMessage</c> with <c>T</c> = <c>string</c>.
/// </remarks>
/// <typeparam name="T">The type of the message content.</typeparam>
public interface IMessage<T> : IMessageBase
{
    /// <summary>
    /// Gets the main content of the message exchanged with the AI model.
    /// The content encapsulates the core information or data being transmitted,
    /// and its type can vary based on the specific implementation or use case.
    /// </summary>
    public T Content { get; init; }
}

View File

@ -0,0 +1,14 @@
namespace AIStudio.Provider;

/// <summary>
/// The non-generic base interface for messages exchanged with AI models.
/// </summary>
public interface IMessageBase
{
    /// <summary>
    /// Gets the role of the entity sending or receiving the message.
    /// This property typically identifies whether the entity is acting
    /// as a user, assistant, or system in the context of the interaction.
    /// </summary>
    public string Role { get; init; }
}

View File

@ -0,0 +1,32 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace AIStudio.Provider;

/// <summary>
/// JSON converter that serializes IMessageBase instances based on their runtime type.
/// </summary>
/// <remarks>
/// Without this converter, System.Text.Json would emit only the properties declared on
/// the IMessageBase interface itself. Dispatching on the runtime type (e.g., TextMessage)
/// ensures every property of the concrete message type ends up in the JSON payload.
/// </remarks>
public sealed class MessageBaseConverter : JsonConverter<IMessageBase>
{
    private static readonly ILogger<MessageBaseConverter> LOGGER = Program.LOGGER_FACTORY.CreateLogger<MessageBaseConverter>();

    /// <inheritdoc />
    public override IMessageBase? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        // Messages are only serialized when building provider requests; there is no code
        // path that reads them back. Log the unexpected call and return null.
        LOGGER.LogError("Deserializing IMessageBase is not supported. This converter is only used for serializing request messages.");
        return null;
    }

    /// <inheritdoc />
    public override void Write(Utf8JsonWriter writer, IMessageBase value, JsonSerializerOptions options)
    {
        // Dispatch on the concrete runtime type so all of its properties are serialized,
        // not just those declared on the IMessageBase interface.
        var concreteType = value.GetType();
        JsonSerializer.Serialize(writer, value, concreteType, options);
    }
}

View File

@ -12,7 +12,7 @@ namespace AIStudio.Provider.Mistral;
/// <param name="SafePrompt">Whether to inject a safety prompt before all conversations.</param>
public readonly record struct ChatRequest(
string Model,
IList<RegularMessage> Messages,
IList<IMessageBase> Messages,
bool Stream,
int RandomSeed,
bool SafePrompt = false

View File

@ -28,7 +28,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
yield break;
// Prepare the system prompt:
var systemPrompt = new RegularMessage
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -38,7 +38,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new RegularMessage
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -1,8 +1,13 @@
namespace AIStudio.Provider.Mistral;
/// <summary>
/// Regulat chat message model.
/// Text chat message model.
/// </summary>
/// <param name="Content">The text content of the message.</param>
/// <param name="Role">The role of the message.</param>
public readonly record struct RegularMessage(string Content, string Role);
public record TextMessage(string Content, string Role) : IMessage<string>
{
    /// <summary>
    /// Parameterless constructor; enables object-initializer syntax
    /// (e.g., new TextMessage { Role = ..., Content = ... }) as used by the providers.
    /// </summary>
    public TextMessage() : this(string.Empty, string.Empty)
    {
    }
}

View File

@ -10,7 +10,7 @@ namespace AIStudio.Provider.OpenAI;
/// <param name="Stream">Whether to stream the chat completion.</param>
public record ChatCompletionAPIRequest(
string Model,
IList<Message> Messages,
IList<IMessageBase> Messages,
bool Stream
)
{

View File

@ -70,7 +70,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
LOGGER.LogInformation("Using the system prompt role '{SystemPromptRole}' and the '{RequestPath}' API for model '{ChatModelId}'.", systemPromptRole, requestPath, chatModel.Id);
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = systemPromptRole,
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -90,7 +90,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
var apiParameters = this.ParseAdditionalApiParameters("input", "store", "tools");
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
@ -119,9 +119,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
// All messages go into the messages field:
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions:
@ -134,27 +132,8 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Input = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => systemPromptRole,
_ => "user",
},
Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
// All messages go into the input field:
Input = [systemPrompt, ..messages],
// Right now, we only support streaming completions:
Stream = true,

View File

@ -12,7 +12,7 @@ namespace AIStudio.Provider.OpenAI;
/// <param name="Tools">The tools to use for the request.</param>
public record ResponsesAPIRequest(
string Model,
IList<Message> Input,
IList<IMessageBase> Input,
bool Stream,
bool Store,
IList<Tool> Tools)

View File

@ -5,4 +5,9 @@ namespace AIStudio.Provider.OpenAI;
/// </summary>
/// <param name="Content">The text content of the message.</param>
/// <param name="Role">The role of the message.</param>
public readonly record struct Message(string Content, string Role);
public record TextMessage(string Content, string Role) : IMessage<string>
{
    /// <summary>
    /// Parameterless constructor; enables object-initializer syntax
    /// (e.g., new TextMessage { Role = ..., Content = ... }) as used by the providers.
    /// </summary>
    public TextMessage() : this(string.Empty, string.Empty)
    {
    }
}

View File

@ -33,7 +33,7 @@ public sealed class ProviderOpenRouter() : BaseProvider("https://openrouter.ai/a
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -43,7 +43,7 @@ public sealed class ProviderOpenRouter() : BaseProvider("https://openrouter.ai/a
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -39,7 +39,7 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -49,7 +49,7 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage()
{
Role = n.Role switch
{

View File

@ -10,7 +10,7 @@ namespace AIStudio.Provider.SelfHosted;
/// <param name="Stream">Whether to stream the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
IList<Message> Messages,
IList<IMessageBase> Messages,
bool Stream
)
{

View File

@ -26,7 +26,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
var requestedSecret = await RUST_SERVICE.GetAPIKey(this, isTrying: true);
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -36,7 +36,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{

View File

@ -5,4 +5,9 @@ namespace AIStudio.Provider.SelfHosted;
/// </summary>
/// <param name="Content">The text content of the message.</param>
/// <param name="Role">The role of the message.</param>
public readonly record struct Message(string Content, string Role);
public record TextMessage(string Content, string Role) : IMessage<string>
{
    /// <summary>
    /// Parameterless constructor; enables object-initializer syntax
    /// (e.g., new TextMessage { Role = ..., Content = ... }) as used by the providers.
    /// </summary>
    public TextMessage() : this(string.Empty, string.Empty)
    {
    }
}

View File

@ -30,7 +30,7 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@ -40,7 +40,7 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{