diff --git a/app/MindWork AI Studio/Chat/ListContentBlockExtensions.cs b/app/MindWork AI Studio/Chat/ListContentBlockExtensions.cs
index f5e8c0ab..5c9883e7 100644
--- a/app/MindWork AI Studio/Chat/ListContentBlockExtensions.cs
+++ b/app/MindWork AI Studio/Chat/ListContentBlockExtensions.cs
@@ -1,4 +1,6 @@
-namespace AIStudio.Chat;
+using AIStudio.Provider;
+
+namespace AIStudio.Chat;
public static class ListContentBlockExtensions
{
@@ -7,9 +9,8 @@ public static class ListContentBlockExtensions
/// </summary>
/// <param name="blocks">The list of content blocks to process.</param>
/// <param name="transformer">A function that transforms each content block into a message result asynchronously.</param>
- /// <typeparam name="TResult">The type of the result produced by the transformation function.</typeparam>
/// <returns>An asynchronous task that resolves to a list of transformed results.</returns>
- public static async Task<List<TResult>> BuildMessages<TResult>(this List<ContentBlock> blocks, Func<ContentBlock, Task<TResult>> transformer)
+ public static async Task<List<IMessageBase>> BuildMessages(this List<ContentBlock> blocks, Func<ContentBlock, Task<IMessageBase>> transformer)
{
var messages = blocks
.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text))
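
For orientation, here is a minimal sketch of how the reworked, non-generic extension is consumed. It mirrors the provider call sites further down in this patch and assumes the surrounding provider code (chatThread, ChatRole, ContentText) from this repository:

```csharp
// Sketch only (mirrors the provider call sites below). The async lambda is target-typed
// to Func<ContentBlock, Task<IMessageBase>>, so returning the concrete TextMessage
// (which implements IMessage<string> and therefore IMessageBase) needs no cast.
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
    Role = n.Role switch
    {
        ChatRole.USER => "user",
        ChatRole.AI => "assistant",
        ChatRole.AGENT => "assistant",
        ChatRole.SYSTEM => "system",

        _ => "user",
    },

    Content = n.Content switch
    {
        ContentText text => text.Text,
        _ => string.Empty,
    }
});

// The result is a List<IMessageBase>; the concrete provider-specific message types are
// preserved at runtime and written out in full by the MessageBaseConverter below.
```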
diff --git a/app/MindWork AI Studio/Provider/AlibabaCloud/ProviderAlibabaCloud.cs b/app/MindWork AI Studio/Provider/AlibabaCloud/ProviderAlibabaCloud.cs
index 78618db2..a5d6e29d 100644
--- a/app/MindWork AI Studio/Provider/AlibabaCloud/ProviderAlibabaCloud.cs
+++ b/app/MindWork AI Studio/Provider/AlibabaCloud/ProviderAlibabaCloud.cs
@@ -30,7 +30,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/Anthropic/ChatRequest.cs b/app/MindWork AI Studio/Provider/Anthropic/ChatRequest.cs
index f7103bd7..d6df3990 100644
--- a/app/MindWork AI Studio/Provider/Anthropic/ChatRequest.cs
+++ b/app/MindWork AI Studio/Provider/Anthropic/ChatRequest.cs
@@ -1,5 +1,4 @@
using System.Text.Json.Serialization;
-using AIStudio.Provider.OpenAI;
namespace AIStudio.Provider.Anthropic;
@@ -13,7 +12,7 @@ namespace AIStudio.Provider.Anthropic;
/// <param name="System">The system prompt for the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
- IList<Message> Messages,
+ IList<IMessageBase> Messages,
int MaxTokens,
bool Stream,
string System
diff --git a/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs b/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs
index 4ea73e77..8a41d0f9 100644
--- a/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs
+++ b/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs
@@ -31,7 +31,7 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
var apiParameters = this.ParseAdditionalApiParameters("system");
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/BaseProvider.cs b/app/MindWork AI Studio/Provider/BaseProvider.cs
index 00b4aa26..9b261646 100644
--- a/app/MindWork AI Studio/Provider/BaseProvider.cs
+++ b/app/MindWork AI Studio/Provider/BaseProvider.cs
@@ -40,7 +40,7 @@ public abstract class BaseProvider : IProvider, ISecretId
protected static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
- Converters = { new AnnotationConverter() },
+ Converters = { new AnnotationConverter(), new MessageBaseConverter() },
AllowTrailingCommas = false
};
@@ -130,7 +130,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if (nextResponse.StatusCode is HttpStatusCode.Forbidden)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Block, string.Format(TB("Tried to communicate with the LLM provider '{0}'. You might not be able to use this provider from your location. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
- this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+ this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@@ -138,7 +138,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.BadRequest)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The required message format might be changed. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
- this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+ this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@@ -146,7 +146,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.NotFound)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. Something was not found. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
- this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+ this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@@ -154,7 +154,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.Unauthorized)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Key, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The API key might be invalid. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
- this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+ this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@@ -162,7 +162,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.InternalServerError)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The server might be down or having issues. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
- this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+ this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@@ -170,7 +170,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.ServiceUnavailable)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The provider is overloaded. The message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
- this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+ this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
diff --git a/app/MindWork AI Studio/Provider/DeepSeek/ProviderDeepSeek.cs b/app/MindWork AI Studio/Provider/DeepSeek/ProviderDeepSeek.cs
index 991d6a2e..0f905486 100644
--- a/app/MindWork AI Studio/Provider/DeepSeek/ProviderDeepSeek.cs
+++ b/app/MindWork AI Studio/Provider/DeepSeek/ProviderDeepSeek.cs
@@ -30,7 +30,7 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/Fireworks/ChatRequest.cs b/app/MindWork AI Studio/Provider/Fireworks/ChatRequest.cs
index 55154ece..54963feb 100644
--- a/app/MindWork AI Studio/Provider/Fireworks/ChatRequest.cs
+++ b/app/MindWork AI Studio/Provider/Fireworks/ChatRequest.cs
@@ -10,7 +10,7 @@ namespace AIStudio.Provider.Fireworks;
/// <param name="Stream">Whether to stream the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
- IList<Message> Messages,
+ IList<IMessageBase> Messages,
bool Stream
)
{
diff --git a/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs b/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs
index 20c79188..daee629d 100644
--- a/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs
+++ b/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs
@@ -30,7 +30,7 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/Fireworks/Message.cs b/app/MindWork AI Studio/Provider/Fireworks/TextMessage.cs
similarity index 59%
rename from app/MindWork AI Studio/Provider/Fireworks/Message.cs
rename to app/MindWork AI Studio/Provider/Fireworks/TextMessage.cs
index 2b0055bd..36340b0f 100644
--- a/app/MindWork AI Studio/Provider/Fireworks/Message.cs
+++ b/app/MindWork AI Studio/Provider/Fireworks/TextMessage.cs
@@ -5,4 +5,9 @@ namespace AIStudio.Provider.Fireworks;
/// </summary>
/// <param name="Content">The text content of the message.</param>
/// <param name="Role">The role of the message.</param>
-public readonly record struct Message(string Content, string Role);
\ No newline at end of file
+public record TextMessage(string Content, string Role) : IMessage<string>
+{
+ public TextMessage() : this(string.Empty, string.Empty)
+ {
+ }
+}
\ No newline at end of file
diff --git a/app/MindWork AI Studio/Provider/GWDG/ProviderGWDG.cs b/app/MindWork AI Studio/Provider/GWDG/ProviderGWDG.cs
index b1cb291c..005d2f74 100644
--- a/app/MindWork AI Studio/Provider/GWDG/ProviderGWDG.cs
+++ b/app/MindWork AI Studio/Provider/GWDG/ProviderGWDG.cs
@@ -30,7 +30,7 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/Google/ChatRequest.cs b/app/MindWork AI Studio/Provider/Google/ChatRequest.cs
index 4fcd03cc..1a898c3a 100644
--- a/app/MindWork AI Studio/Provider/Google/ChatRequest.cs
+++ b/app/MindWork AI Studio/Provider/Google/ChatRequest.cs
@@ -1,5 +1,4 @@
using System.Text.Json.Serialization;
-using AIStudio.Provider.OpenAI;
namespace AIStudio.Provider.Google;
@@ -11,7 +10,7 @@ namespace AIStudio.Provider.Google;
/// <param name="Stream">Whether to stream the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
- IList<Message> Messages,
+ IList<IMessageBase> Messages,
bool Stream
)
{
diff --git a/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs b/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs
index 7ce3f24e..98cc49ad 100644
--- a/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs
+++ b/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs
@@ -30,7 +30,7 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/Groq/ChatRequest.cs b/app/MindWork AI Studio/Provider/Groq/ChatRequest.cs
index e45683fe..2e7668f1 100644
--- a/app/MindWork AI Studio/Provider/Groq/ChatRequest.cs
+++ b/app/MindWork AI Studio/Provider/Groq/ChatRequest.cs
@@ -1,5 +1,4 @@
using System.Text.Json.Serialization;
-using AIStudio.Provider.OpenAI;
namespace AIStudio.Provider.Groq;
@@ -12,7 +11,7 @@ namespace AIStudio.Provider.Groq;
/// <param name="Seed">The seed for the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
- IList<Message> Messages,
+ IList<IMessageBase> Messages,
bool Stream,
int Seed
)
diff --git a/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs b/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs
index 45473d82..c0048379 100644
--- a/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs
+++ b/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs
@@ -30,7 +30,7 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/Helmholtz/ProviderHelmholtz.cs b/app/MindWork AI Studio/Provider/Helmholtz/ProviderHelmholtz.cs
index 3f7b405b..19a204d4 100644
--- a/app/MindWork AI Studio/Provider/Helmholtz/ProviderHelmholtz.cs
+++ b/app/MindWork AI Studio/Provider/Helmholtz/ProviderHelmholtz.cs
@@ -30,7 +30,7 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/HuggingFace/ProviderHuggingFace.cs b/app/MindWork AI Studio/Provider/HuggingFace/ProviderHuggingFace.cs
index 31522b5c..f7c7acbd 100644
--- a/app/MindWork AI Studio/Provider/HuggingFace/ProviderHuggingFace.cs
+++ b/app/MindWork AI Studio/Provider/HuggingFace/ProviderHuggingFace.cs
@@ -35,7 +35,7 @@ public sealed class ProviderHuggingFace : BaseProvider
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -45,7 +45,7 @@ public sealed class ProviderHuggingFace : BaseProvider
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var message = await chatThread.Blocks.BuildMessages(async n => new Message
+ var message = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/IMessage.cs b/app/MindWork AI Studio/Provider/IMessage.cs
new file mode 100644
index 00000000..2a8c9e2f
--- /dev/null
+++ b/app/MindWork AI Studio/Provider/IMessage.cs
@@ -0,0 +1,15 @@
+namespace AIStudio.Provider;
+
+/// <summary>
+/// Standard interface for messages exchanged with AI models.
+/// </summary>
+/// <typeparam name="T">The type of the message content.</typeparam>
+public interface IMessage<T> : IMessageBase
+{
+ /// <summary>
+ /// Gets the main content of the message exchanged with the AI model.
+ /// The content encapsulates the core information or data being transmitted,
+ /// and its type can vary based on the specific implementation or use case.
+ /// </summary>
+ public T Content { get; init; }
+}
\ No newline at end of file
diff --git a/app/MindWork AI Studio/Provider/IMessageBase.cs b/app/MindWork AI Studio/Provider/IMessageBase.cs
new file mode 100644
index 00000000..8f67cc8b
--- /dev/null
+++ b/app/MindWork AI Studio/Provider/IMessageBase.cs
@@ -0,0 +1,14 @@
+namespace AIStudio.Provider;
+
+/// <summary>
+/// The non-generic base interface for messages exchanged with AI models.
+/// </summary>
+public interface IMessageBase
+{
+ /// <summary>
+ /// Gets the role of the entity sending or receiving the message.
+ /// This property typically identifies whether the entity is acting
+ /// as a user, assistant, or system in the context of the interaction.
+ /// </summary>
+ public string Role { get; init; }
+}
\ No newline at end of file
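
To make the intent of the two interfaces concrete: request records hold a heterogeneous IList<IMessageBase>, while each concrete message keeps a strongly typed Content via IMessage<T>. The following sketch is purely illustrative; ImagePart and MultimodalMessage are hypothetical types and not part of this change:

```csharp
using System.Collections.Generic;
using AIStudio.Provider;
using AIStudio.Provider.OpenAI;

// Hypothetical illustration only: ImagePart and MultimodalMessage do not exist in this diff.
// A future message kind with non-string content could implement IMessage<T> for its own T
// and still be stored next to TextMessage in the same IList<IMessageBase>.
public record ImagePart(string MimeType, string Base64Data);

public record MultimodalMessage(IList<ImagePart> Content, string Role) : IMessage<IList<ImagePart>>;

public static class MessageListExample
{
    public static IList<IMessageBase> Build() =>
    [
        new TextMessage("Describe this image, please.", "user"),
        new MultimodalMessage([new ImagePart("image/png", "...")], "user"),
    ];
}
```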
diff --git a/app/MindWork AI Studio/Provider/MessageBaseConverter.cs b/app/MindWork AI Studio/Provider/MessageBaseConverter.cs
new file mode 100644
index 00000000..7707736e
--- /dev/null
+++ b/app/MindWork AI Studio/Provider/MessageBaseConverter.cs
@@ -0,0 +1,32 @@
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace AIStudio.Provider;
+
+/// <summary>
+/// Custom JSON converter for the IMessageBase interface to handle polymorphic serialization.
+/// </summary>
+/// <remarks>
+/// This converter ensures that when serializing IMessageBase objects, all properties
+/// of the concrete implementation (e.g., TextMessage) are serialized, not just the
+/// properties defined in the IMessageBase interface.
+/// </remarks>
+public sealed class MessageBaseConverter : JsonConverter<IMessageBase>
+{
+ private static readonly ILogger LOGGER = Program.LOGGER_FACTORY.CreateLogger<MessageBaseConverter>();
+
+ public override IMessageBase? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
+ {
+ // Deserialization is not needed for request objects, as messages are only serialized
+ // when sending requests to LLM providers.
+ LOGGER.LogError("Deserializing IMessageBase is not supported. This converter is only used for serializing request messages.");
+ return null;
+ }
+
+ public override void Write(Utf8JsonWriter writer, IMessageBase value, JsonSerializerOptions options)
+ {
+ // Serialize the actual concrete type (e.g., TextMessage) instead of just the IMessageBase interface.
+ // This ensures all properties of the concrete type are included in the JSON output.
+ JsonSerializer.Serialize(writer, value, value.GetType(), options);
+ }
+}
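
A short usage sketch of the converter's effect. The serializer options mirror the BaseProvider setup above (without the AnnotationConverter), and the JSON shown in the comments is the expected output:

```csharp
using System;
using System.Collections.Generic;
using System.Text.Json;
using AIStudio.Provider;
using AIStudio.Provider.OpenAI;

// Without the converter, System.Text.Json serializes elements declared as IMessageBase
// using the interface metadata, so only the Role property is written. With the converter,
// Write() dispatches on the runtime type (TextMessage) and all its properties appear.
IList<IMessageBase> messages = [new TextMessage("Hello!", "user")];

var withoutConverter = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower };
Console.WriteLine(JsonSerializer.Serialize(messages, withoutConverter));
// Expected: [{"role":"user"}]

var withConverter = new JsonSerializerOptions
{
    PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
    Converters = { new MessageBaseConverter() },
};
Console.WriteLine(JsonSerializer.Serialize(messages, withConverter));
// Expected: [{"content":"Hello!","role":"user"}]
```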
diff --git a/app/MindWork AI Studio/Provider/Mistral/ChatRequest.cs b/app/MindWork AI Studio/Provider/Mistral/ChatRequest.cs
index b12dd15d..01a45a89 100644
--- a/app/MindWork AI Studio/Provider/Mistral/ChatRequest.cs
+++ b/app/MindWork AI Studio/Provider/Mistral/ChatRequest.cs
@@ -12,7 +12,7 @@ namespace AIStudio.Provider.Mistral;
/// <param name="SafePrompt">Whether to inject a safety prompt before all conversations.</param>
public readonly record struct ChatRequest(
string Model,
- IList<RegularMessage> Messages,
+ IList<IMessageBase> Messages,
bool Stream,
int RandomSeed,
bool SafePrompt = false
diff --git a/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs b/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs
index bd999b92..8bf55ee0 100644
--- a/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs
+++ b/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs
@@ -28,7 +28,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
yield break;
// Prepare the system prompt:
- var systemPrompt = new RegularMessage
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -38,7 +38,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new RegularMessage
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/Mistral/RegularMessage.cs b/app/MindWork AI Studio/Provider/Mistral/TextMessage.cs
similarity index 51%
rename from app/MindWork AI Studio/Provider/Mistral/RegularMessage.cs
rename to app/MindWork AI Studio/Provider/Mistral/TextMessage.cs
index df5bdcd3..9606a9d8 100644
--- a/app/MindWork AI Studio/Provider/Mistral/RegularMessage.cs
+++ b/app/MindWork AI Studio/Provider/Mistral/TextMessage.cs
@@ -1,8 +1,13 @@
namespace AIStudio.Provider.Mistral;
/// <summary>
-/// Regulat chat message model.
+/// Text chat message model.
/// </summary>
/// <param name="Content">The text content of the message.</param>
/// <param name="Role">The role of the message.</param>
-public readonly record struct RegularMessage(string Content, string Role);
\ No newline at end of file
+public record TextMessage(string Content, string Role) : IMessage<string>
+{
+ public TextMessage() : this(string.Empty, string.Empty)
+ {
+ }
+}
\ No newline at end of file
diff --git a/app/MindWork AI Studio/Provider/OpenAI/ChatCompletionAPIRequest.cs b/app/MindWork AI Studio/Provider/OpenAI/ChatCompletionAPIRequest.cs
index 51805910..bd9c08e7 100644
--- a/app/MindWork AI Studio/Provider/OpenAI/ChatCompletionAPIRequest.cs
+++ b/app/MindWork AI Studio/Provider/OpenAI/ChatCompletionAPIRequest.cs
@@ -10,7 +10,7 @@ namespace AIStudio.Provider.OpenAI;
/// <param name="Stream">Whether to stream the chat completion.</param>
public record ChatCompletionAPIRequest(
string Model,
- IList<Message> Messages,
+ IList<IMessageBase> Messages,
bool Stream
)
{
diff --git a/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs b/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs
index 89da7b7d..7b91a7ae 100644
--- a/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs
+++ b/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs
@@ -70,7 +70,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
LOGGER.LogInformation("Using the system prompt role '{SystemPromptRole}' and the '{RequestPath}' API for model '{ChatModelId}'.", systemPromptRole, requestPath, chatModel.Id);
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = systemPromptRole,
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -90,7 +90,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
var apiParameters = this.ParseAdditionalApiParameters("input", "store", "tools");
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
@@ -119,9 +119,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
{
Model = chatModel.Id,
- // Build the messages:
- // - First of all the system prompt
- // - Then none-empty user and AI messages
+ // All messages go into the messages field:
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions:
@@ -134,27 +132,8 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
{
Model = chatModel.Id,
- // Build the messages:
- // - First of all the system prompt
- // - Then none-empty user and AI messages
- Input = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
- {
- Role = n.Role switch
- {
- ChatRole.USER => "user",
- ChatRole.AI => "assistant",
- ChatRole.AGENT => "assistant",
- ChatRole.SYSTEM => systemPromptRole,
-
- _ => "user",
- },
-
- Content = n.Content switch
- {
- ContentText text => text.Text,
- _ => string.Empty,
- }
- }).ToList()],
+ // All messages go into the input field:
+ Input = [systemPrompt, ..messages],
// Right now, we only support streaming completions:
Stream = true,
diff --git a/app/MindWork AI Studio/Provider/OpenAI/ResponsesAPIRequest.cs b/app/MindWork AI Studio/Provider/OpenAI/ResponsesAPIRequest.cs
index e3c9541b..deb315d6 100644
--- a/app/MindWork AI Studio/Provider/OpenAI/ResponsesAPIRequest.cs
+++ b/app/MindWork AI Studio/Provider/OpenAI/ResponsesAPIRequest.cs
@@ -12,7 +12,7 @@ namespace AIStudio.Provider.OpenAI;
/// <param name="Tools">The tools to use for the request.</param>
public record ResponsesAPIRequest(
string Model,
- IList<Message> Input,
+ IList<IMessageBase> Input,
bool Stream,
bool Store,
IList Tools)
diff --git a/app/MindWork AI Studio/Provider/OpenAI/Message.cs b/app/MindWork AI Studio/Provider/OpenAI/TextMessage.cs
similarity index 58%
rename from app/MindWork AI Studio/Provider/OpenAI/Message.cs
rename to app/MindWork AI Studio/Provider/OpenAI/TextMessage.cs
index 508645b0..ceaeb73d 100644
--- a/app/MindWork AI Studio/Provider/OpenAI/Message.cs
+++ b/app/MindWork AI Studio/Provider/OpenAI/TextMessage.cs
@@ -5,4 +5,9 @@ namespace AIStudio.Provider.OpenAI;
/// </summary>
/// <param name="Content">The text content of the message.</param>
/// <param name="Role">The role of the message.</param>
-public readonly record struct Message(string Content, string Role);
\ No newline at end of file
+public record TextMessage(string Content, string Role) : IMessage<string>
+{
+ public TextMessage() : this(string.Empty, string.Empty)
+ {
+ }
+}
\ No newline at end of file
diff --git a/app/MindWork AI Studio/Provider/OpenRouter/ProviderOpenRouter.cs b/app/MindWork AI Studio/Provider/OpenRouter/ProviderOpenRouter.cs
index 2b5e5780..0407961e 100644
--- a/app/MindWork AI Studio/Provider/OpenRouter/ProviderOpenRouter.cs
+++ b/app/MindWork AI Studio/Provider/OpenRouter/ProviderOpenRouter.cs
@@ -33,7 +33,7 @@ public sealed class ProviderOpenRouter() : BaseProvider("https://openrouter.ai/a
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -43,7 +43,7 @@ public sealed class ProviderOpenRouter() : BaseProvider("https://openrouter.ai/a
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/Perplexity/ProviderPerplexity.cs b/app/MindWork AI Studio/Provider/Perplexity/ProviderPerplexity.cs
index 3687ad7b..40672460 100644
--- a/app/MindWork AI Studio/Provider/Perplexity/ProviderPerplexity.cs
+++ b/app/MindWork AI Studio/Provider/Perplexity/ProviderPerplexity.cs
@@ -39,7 +39,7 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -49,7 +49,7 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage()
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/SelfHosted/ChatRequest.cs b/app/MindWork AI Studio/Provider/SelfHosted/ChatRequest.cs
index 4791692c..e1da56bd 100644
--- a/app/MindWork AI Studio/Provider/SelfHosted/ChatRequest.cs
+++ b/app/MindWork AI Studio/Provider/SelfHosted/ChatRequest.cs
@@ -10,7 +10,7 @@ namespace AIStudio.Provider.SelfHosted;
/// <param name="Stream">Whether to stream the chat completion.</param>
public readonly record struct ChatRequest(
string Model,
- IList<Message> Messages,
+ IList<IMessageBase> Messages,
bool Stream
)
{
diff --git a/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs b/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs
index 4389099e..e06e3adc 100644
--- a/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs
+++ b/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs
@@ -26,7 +26,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
var requestedSecret = await RUST_SERVICE.GetAPIKey(this, isTrying: true);
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -36,7 +36,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{
diff --git a/app/MindWork AI Studio/Provider/SelfHosted/Message.cs b/app/MindWork AI Studio/Provider/SelfHosted/TextMessage.cs
similarity index 59%
rename from app/MindWork AI Studio/Provider/SelfHosted/Message.cs
rename to app/MindWork AI Studio/Provider/SelfHosted/TextMessage.cs
index e4ecc70a..1479257d 100644
--- a/app/MindWork AI Studio/Provider/SelfHosted/Message.cs
+++ b/app/MindWork AI Studio/Provider/SelfHosted/TextMessage.cs
@@ -5,4 +5,9 @@ namespace AIStudio.Provider.SelfHosted;
/// </summary>
/// <param name="Content">The text content of the message.</param>
/// <param name="Role">The role of the message.</param>
-public readonly record struct Message(string Content, string Role);
\ No newline at end of file
+public record TextMessage(string Content, string Role) : IMessage<string>
+{
+ public TextMessage() : this(string.Empty, string.Empty)
+ {
+ }
+}
\ No newline at end of file
diff --git a/app/MindWork AI Studio/Provider/X/ProviderX.cs b/app/MindWork AI Studio/Provider/X/ProviderX.cs
index e8a0b2e7..28d01a71 100644
--- a/app/MindWork AI Studio/Provider/X/ProviderX.cs
+++ b/app/MindWork AI Studio/Provider/X/ProviderX.cs
@@ -30,7 +30,7 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
yield break;
// Prepare the system prompt:
- var systemPrompt = new Message
+ var systemPrompt = new TextMessage
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
- var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
+ var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
Role = n.Role switch
{