Mirror of https://github.com/MindWorkAI/AI-Studio.git (synced 2026-02-12 07:01:37 +00:00)
Refactored chat requests (#607)
Some checks are pending
Build and Release / Read metadata (push) Waiting to run
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-apple-darwin, osx-arm64, macos-latest, aarch64-apple-darwin, dmg updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-pc-windows-msvc.exe, win-arm64, windows-latest, aarch64-pc-windows-msvc, nsis updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-unknown-linux-gnu, linux-arm64, ubuntu-22.04-arm, aarch64-unknown-linux-gnu, appimage deb updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-apple-darwin, osx-x64, macos-latest, x86_64-apple-darwin, dmg updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-pc-windows-msvc.exe, win-x64, windows-latest, x86_64-pc-windows-msvc, nsis updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-unknown-linux-gnu, linux-x64, ubuntu-22.04, x86_64-unknown-linux-gnu, appimage deb updater) (push) Blocked by required conditions
Build and Release / Prepare & create release (push) Blocked by required conditions
Build and Release / Publish release (push) Blocked by required conditions
This commit is contained in:
parent: ef3d58cbee
commit: 4be5002088
@@ -1,4 +1,6 @@
+using AIStudio.Provider;
+
 namespace AIStudio.Chat;
 
 public static class ListContentBlockExtensions
 {
@@ -7,9 +9,8 @@ public static class ListContentBlockExtensions
     /// </summary>
     /// <param name="blocks">The list of content blocks to process.</param>
     /// <param name="transformer">A function that transforms each content block into a message result asynchronously.</param>
-    /// <typeparam name="TResult">The type of the result produced by the transformation function.</typeparam>
     /// <returns>An asynchronous task that resolves to a list of transformed results.</returns>
-    public static async Task<IList<TResult>> BuildMessages<TResult>(this List<ContentBlock> blocks, Func<ContentBlock, Task<TResult>> transformer)
+    public static async Task<IList<IMessageBase>> BuildMessages(this List<ContentBlock> blocks, Func<ContentBlock, Task<IMessageBase>> transformer)
     {
         var messages = blocks
             .Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text))
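Note: every provider hunk below switches the BuildMessages call site from the old per-provider Message records to the new TextMessage records, so the extension can hand back IList<IMessageBase>. A minimal sketch of that shared call-site pattern, assuming the repository's ChatThread, ChatRole, ContentText, and per-provider TextMessage types; this is a hedged illustration, not a verbatim copy of any single provider:

// Sketch only: mirrors the pattern in the provider hunks below.
var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
{
    Role = n.Role switch
    {
        ChatRole.USER   => "user",
        ChatRole.AI     => "assistant",
        ChatRole.AGENT  => "assistant",
        ChatRole.SYSTEM => "system",
        _ => "user",
    },
    Content = n.Content switch
    {
        ContentText text => text.Text,
        _ => string.Empty,
    }
});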
@@ -30,7 +30,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -1,5 +1,4 @@
 using System.Text.Json.Serialization;
-using AIStudio.Provider.OpenAI;
 
 namespace AIStudio.Provider.Anthropic;
 
@@ -13,7 +12,7 @@ namespace AIStudio.Provider.Anthropic;
 /// <param name="System">The system prompt for the chat completion.</param>
 public readonly record struct ChatRequest(
     string Model,
-    IList<Message> Messages,
+    IList<IMessageBase> Messages,
     int MaxTokens,
     bool Stream,
     string System
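Note: with Messages typed as IList<IMessageBase>, the same list produced by BuildMessages can populate any provider's request record. A hedged sketch against the Anthropic ChatRequest shape shown above; the model name, token limit, and the systemPromptText variable are illustrative placeholders, not values from this commit:

// Sketch only: record shape taken from the hunk above, values are placeholders.
var request = new ChatRequest
{
    Model = "claude-3-7-sonnet-latest",
    Messages = messages,        // IList<IMessageBase>, e.g. TextMessage instances
    MaxTokens = 4096,
    Stream = true,
    System = systemPromptText,  // hypothetical variable holding the system prompt
};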
@@ -31,7 +31,7 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
         var apiParameters = this.ParseAdditionalApiParameters("system");
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -40,7 +40,7 @@ public abstract class BaseProvider : IProvider, ISecretId
     protected static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new()
     {
         PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
-        Converters = { new AnnotationConverter() },
+        Converters = { new AnnotationConverter(), new MessageBaseConverter() },
         AllowTrailingCommas = false
     };
 
@@ -130,7 +130,7 @@ public abstract class BaseProvider : IProvider, ISecretId
             if (nextResponse.StatusCode is HttpStatusCode.Forbidden)
             {
                 await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Block, string.Format(TB("Tried to communicate with the LLM provider '{0}'. You might not be able to use this provider from your location. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
-                this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+                this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
                 errorMessage = nextResponse.ReasonPhrase;
                 break;
             }
@@ -138,7 +138,7 @@ public abstract class BaseProvider : IProvider, ISecretId
             if(nextResponse.StatusCode is HttpStatusCode.BadRequest)
             {
                 await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The required message format might be changed. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
-                this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+                this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
                 errorMessage = nextResponse.ReasonPhrase;
                 break;
             }
@@ -146,7 +146,7 @@ public abstract class BaseProvider : IProvider, ISecretId
             if(nextResponse.StatusCode is HttpStatusCode.NotFound)
             {
                 await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. Something was not found. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
-                this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+                this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
                 errorMessage = nextResponse.ReasonPhrase;
                 break;
             }
@@ -154,7 +154,7 @@ public abstract class BaseProvider : IProvider, ISecretId
             if(nextResponse.StatusCode is HttpStatusCode.Unauthorized)
             {
                 await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Key, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The API key might be invalid. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
-                this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+                this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
                 errorMessage = nextResponse.ReasonPhrase;
                 break;
             }
@@ -162,7 +162,7 @@ public abstract class BaseProvider : IProvider, ISecretId
             if(nextResponse.StatusCode is HttpStatusCode.InternalServerError)
             {
                 await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The server might be down or having issues. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
-                this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+                this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
                 errorMessage = nextResponse.ReasonPhrase;
                 break;
             }
@@ -170,7 +170,7 @@ public abstract class BaseProvider : IProvider, ISecretId
             if(nextResponse.StatusCode is HttpStatusCode.ServiceUnavailable)
             {
                 await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The provider is overloaded. The message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
-                this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
+                this.logger.LogError("Failed request with status code {ResponseStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
                 errorMessage = nextResponse.ReasonPhrase;
                 break;
             }
@@ -30,7 +30,7 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
        {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -10,7 +10,7 @@ namespace AIStudio.Provider.Fireworks;
 /// <param name="Stream">Whether to stream the chat completion.</param>
 public readonly record struct ChatRequest(
     string Model,
-    IList<Message> Messages,
+    IList<IMessageBase> Messages,
     bool Stream
 )
 {
@@ -30,7 +30,7 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -5,4 +5,9 @@ namespace AIStudio.Provider.Fireworks;
 /// </summary>
 /// <param name="Content">The text content of the message.</param>
 /// <param name="Role">The role of the message.</param>
-public readonly record struct Message(string Content, string Role);
+public record TextMessage(string Content, string Role) : IMessage<string>
+{
+    public TextMessage() : this(string.Empty, string.Empty)
+    {
+    }
+}
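Note: the switch from a readonly record struct to a record class plus the added parameterless constructor is what keeps the object-initializer call sites above compiling. A positional record only exposes the (Content, Role) constructor, so new TextMessage { Role = ..., Content = ... } needs an extra parameterless constructor it can call first; as a class, the instance can also sit in an IList<IMessageBase> without boxing. A standalone illustration mirroring the record above (not repository code verbatim):

// Standalone sketch: why the parameterless constructor matters.
var positional  = new TextMessage("Hello!", "user");
var initializer = new TextMessage { Role = "user", Content = "Hello!" };

public record TextMessage(string Content, string Role)
{
    // Without this constructor, the object-initializer form above would not compile,
    // because the only constructor would require Content and Role arguments.
    public TextMessage() : this(string.Empty, string.Empty)
    {
    }
}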
@@ -30,7 +30,7 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -1,5 +1,4 @@
 using System.Text.Json.Serialization;
-using AIStudio.Provider.OpenAI;
 
 namespace AIStudio.Provider.Google;
 
@@ -11,7 +10,7 @@ namespace AIStudio.Provider.Google;
 /// <param name="Stream">Whether to stream the chat completion.</param>
 public readonly record struct ChatRequest(
     string Model,
-    IList<Message> Messages,
+    IList<IMessageBase> Messages,
     bool Stream
 )
 {
@@ -30,7 +30,7 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -1,5 +1,4 @@
 using System.Text.Json.Serialization;
-using AIStudio.Provider.OpenAI;
 
 namespace AIStudio.Provider.Groq;
 
@@ -12,7 +11,7 @@ namespace AIStudio.Provider.Groq;
 /// <param name="Seed">The seed for the chat completion.</param>
 public readonly record struct ChatRequest(
     string Model,
-    IList<Message> Messages,
+    IList<IMessageBase> Messages,
     bool Stream,
     int Seed
 )
@@ -30,7 +30,7 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -30,7 +30,7 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -35,7 +35,7 @@ public sealed class ProviderHuggingFace : BaseProvider
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -45,7 +45,7 @@ public sealed class ProviderHuggingFace : BaseProvider
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var message = await chatThread.Blocks.BuildMessages(async n => new Message
+        var message = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
app/MindWork AI Studio/Provider/IMessage.cs (new file, 15 lines)
@@ -0,0 +1,15 @@
+namespace AIStudio.Provider;
+
+/// <summary>
+/// Standard interface for messages exchanged with AI models.
+/// </summary>
+/// <typeparam name="T">The type of the message content.</typeparam>
+public interface IMessage<T> : IMessageBase
+{
+    /// <summary>
+    /// Gets the main content of the message exchanged with the AI model.
+    /// The content encapsulates the core information or data being transmitted,
+    /// and its type can vary based on the specific implementation or use case.
+    /// </summary>
+    public T Content { get; init; }
+}
app/MindWork AI Studio/Provider/IMessageBase.cs (new file, 14 lines)
@@ -0,0 +1,14 @@
+namespace AIStudio.Provider;
+
+/// <summary>
+/// The none-generic base interface for messages exchanged with AI models.
+/// </summary>
+public interface IMessageBase
+{
+    /// <summary>
+    /// Gets the role of the entity sending or receiving the message.
+    /// This property typically identifies whether the entity is acting
+    /// as a user, assistant, or system in the context of the interaction.
+    /// </summary>
+    public string Role { get; init; }
+}
app/MindWork AI Studio/Provider/MessageBaseConverter.cs (new file, 32 lines)
@@ -0,0 +1,32 @@
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace AIStudio.Provider;
+
+/// <summary>
+/// Custom JSON converter for the IMessageBase interface to handle polymorphic serialization.
+/// </summary>
+/// <remarks>
+/// This converter ensures that when serializing IMessageBase objects, all properties
+/// of the concrete implementation (e.g., TextMessage) are serialized, not just the
+/// properties defined in the IMessageBase interface.
+/// </remarks>
+public sealed class MessageBaseConverter : JsonConverter<IMessageBase>
+{
+    private static readonly ILogger<MessageBaseConverter> LOGGER = Program.LOGGER_FACTORY.CreateLogger<MessageBaseConverter>();
+
+    public override IMessageBase? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
+    {
+        // Deserialization is not needed for request objects, as messages are only serialized
+        // when sending requests to LLM providers.
+        LOGGER.LogError("Deserializing IMessageBase is not supported. This converter is only used for serializing request messages.");
+        return null;
+    }
+
+    public override void Write(Utf8JsonWriter writer, IMessageBase value, JsonSerializerOptions options)
+    {
+        // Serialize the actual concrete type (e.g., TextMessage) instead of just the IMessageBase interface.
+        // This ensures all properties of the concrete type are included in the JSON output.
+        JsonSerializer.Serialize(writer, value, value.GetType(), options);
+    }
+}
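Note: by default, System.Text.Json serializes a value through its declared type, so a list element declared as IMessageBase would only have its Role property written; dispatching on value.GetType() inside the converter, which BaseProvider registers in JSON_SERIALIZER_OPTIONS as shown earlier, makes the serializer emit every property of the concrete record. A self-contained sketch of the same mechanism, re-created here for illustration; it mirrors the new interfaces and converter but is not the repository code verbatim:

using System;
using System.Collections.Generic;
using System.Text.Json;
using System.Text.Json.Serialization;

var options = new JsonSerializerOptions
{
    PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
    Converters = { new MessageBaseConverter() },
};

List<IMessageBase> messages =
[
    new TextMessage("You are a helpful assistant.", "system"),
    new TextMessage("Hello!", "user"),
];

// Prints something like: [{"content":"You are a helpful assistant.","role":"system"},{"content":"Hello!","role":"user"}]
// Without the converter, only the interface property "role" would be emitted per element.
Console.WriteLine(JsonSerializer.Serialize(messages, options));

public interface IMessageBase
{
    string Role { get; init; }
}

public interface IMessage<T> : IMessageBase
{
    T Content { get; init; }
}

public record TextMessage(string Content, string Role) : IMessage<string>;

public sealed class MessageBaseConverter : JsonConverter<IMessageBase>
{
    public override IMessageBase? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
        => throw new NotSupportedException("Request messages are only serialized, never deserialized.");

    public override void Write(Utf8JsonWriter writer, IMessageBase value, JsonSerializerOptions options)
        // Serialize via the concrete runtime type so all of its properties are included.
        => JsonSerializer.Serialize(writer, value, value.GetType(), options);
}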
@@ -12,7 +12,7 @@ namespace AIStudio.Provider.Mistral;
 /// <param name="SafePrompt">Whether to inject a safety prompt before all conversations.</param>
 public readonly record struct ChatRequest(
     string Model,
-    IList<RegularMessage> Messages,
+    IList<IMessageBase> Messages,
     bool Stream,
     int RandomSeed,
     bool SafePrompt = false
@@ -28,7 +28,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new RegularMessage
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -38,7 +38,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new RegularMessage
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -1,8 +1,13 @@
 namespace AIStudio.Provider.Mistral;
 
 /// <summary>
-/// Regulat chat message model.
+/// Text chat message model.
 /// </summary>
 /// <param name="Content">The text content of the message.</param>
 /// <param name="Role">The role of the message.</param>
-public readonly record struct RegularMessage(string Content, string Role);
+public record TextMessage(string Content, string Role) : IMessage<string>
+{
+    public TextMessage() : this(string.Empty, string.Empty)
+    {
+    }
+}
@@ -10,7 +10,7 @@ namespace AIStudio.Provider.OpenAI;
 /// <param name="Stream">Whether to stream the chat completion.</param>
 public record ChatCompletionAPIRequest(
     string Model,
-    IList<Message> Messages,
+    IList<IMessageBase> Messages,
     bool Stream
 )
 {
@@ -70,7 +70,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
         LOGGER.LogInformation("Using the system prompt role '{SystemPromptRole}' and the '{RequestPath}' API for model '{ChatModelId}'.", systemPromptRole, requestPath, chatModel.Id);
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = systemPromptRole,
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -90,7 +90,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
         var apiParameters = this.ParseAdditionalApiParameters("input", "store", "tools");
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -119,9 +119,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
         {
             Model = chatModel.Id,
 
-            // Build the messages:
-            // - First of all the system prompt
-            // - Then none-empty user and AI messages
+            // All messages go into the messages field:
             Messages = [systemPrompt, ..messages],
 
             // Right now, we only support streaming completions:
@@ -134,27 +132,8 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
         {
             Model = chatModel.Id,
 
-            // Build the messages:
-            // - First of all the system prompt
-            // - Then none-empty user and AI messages
-            Input = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-            {
-                Role = n.Role switch
-                {
-                    ChatRole.USER => "user",
-                    ChatRole.AI => "assistant",
-                    ChatRole.AGENT => "assistant",
-                    ChatRole.SYSTEM => systemPromptRole,
-
-                    _ => "user",
-                },
-
-                Content = n.Content switch
-                {
-                    ContentText text => text.Text,
-                    _ => string.Empty,
-                }
-            }).ToList()],
+            // All messages go into the input field:
+            Input = [systemPrompt, ..messages],
 
             // Right now, we only support streaming completions:
             Stream = true,
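Note: both OpenAI request shapes above splice the system prompt and the transformed messages together with a C# 12 collection-expression spread, so the Messages field (chat-completions API) and the Input field (responses API) are built from the same list. A standalone illustration of that spread syntax; the element type is plain string here just for brevity:

using System;
using System.Collections.Generic;

string systemPrompt = "system";
List<string> messages = ["user", "assistant", "user"];

// [systemPrompt, ..messages] creates a new list that starts with systemPrompt
// and then contains every element of messages, in order.
List<string> combined = [systemPrompt, ..messages];
Console.WriteLine(string.Join(", ", combined)); // system, user, assistant, user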
@@ -12,7 +12,7 @@ namespace AIStudio.Provider.OpenAI;
 /// <param name="Tools">The tools to use for the request.</param>
 public record ResponsesAPIRequest(
     string Model,
-    IList<Message> Input,
+    IList<IMessageBase> Input,
     bool Stream,
     bool Store,
     IList<Tool> Tools)
@@ -5,4 +5,9 @@ namespace AIStudio.Provider.OpenAI;
 /// </summary>
 /// <param name="Content">The text content of the message.</param>
 /// <param name="Role">The role of the message.</param>
-public readonly record struct Message(string Content, string Role);
+public record TextMessage(string Content, string Role) : IMessage<string>
+{
+    public TextMessage() : this(string.Empty, string.Empty)
+    {
+    }
+}
@@ -33,7 +33,7 @@ public sealed class ProviderOpenRouter() : BaseProvider("https://openrouter.ai/a
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -43,7 +43,7 @@ public sealed class ProviderOpenRouter() : BaseProvider("https://openrouter.ai/a
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -39,7 +39,7 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -49,7 +49,7 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage()
         {
             Role = n.Role switch
             {
@@ -10,7 +10,7 @@ namespace AIStudio.Provider.SelfHosted;
 /// <param name="Stream">Whether to stream the chat completion.</param>
 public readonly record struct ChatRequest(
     string Model,
-    IList<Message> Messages,
+    IList<IMessageBase> Messages,
     bool Stream
 )
 {
@@ -26,7 +26,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this, isTrying: true);
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -36,7 +36,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {
@@ -5,4 +5,9 @@ namespace AIStudio.Provider.SelfHosted;
 /// </summary>
 /// <param name="Content">The text content of the message.</param>
 /// <param name="Role">The role of the message.</param>
-public readonly record struct Message(string Content, string Role);
+public record TextMessage(string Content, string Role) : IMessage<string>
+{
+    public TextMessage() : this(string.Empty, string.Empty)
+    {
+    }
+}
@@ -30,7 +30,7 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
             yield break;
 
         // Prepare the system prompt:
-        var systemPrompt = new Message
+        var systemPrompt = new TextMessage
         {
             Role = "system",
             Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
@@ -40,7 +40,7 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
         var apiParameters = this.ParseAdditionalApiParameters();
 
         // Build the list of messages:
-        var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
+        var messages = await chatThread.Blocks.BuildMessages(async n => new TextMessage
         {
             Role = n.Role switch
             {