Add file attachment support to the providers and add the AttachFile button

This commit is contained in:
hart_s3 2025-12-04 15:25:15 +01:00
parent 311092ec5e
commit 2ecc883cc8
26 changed files with 430 additions and 269 deletions

View File

@@ -91,7 +91,7 @@ else
        @T("Documents for the analysis")
    </MudText>
-   <AttachDocuments Name="Document Analysis Files Drop" @bind-DocumentPaths="@this.loadedDocumentPaths" CatchAllDocuments="true"/>
+   <AttachDocuments Name="Document Analysis Files Drop" @bind-DocumentPaths="@this.loadedDocumentPaths" CatchAllDocuments="true" UseSmallForm="false"/>
    </ExpansionPanel>
</MudExpansionPanels>

View File

@@ -1495,12 +1495,18 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T2928927510"] = "Videos
-- Images are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T298062956"] = "Images are not supported yet"
-- Click to attach files
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3521845090"] = "Click to attach files"
-- Clear file list
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3759696136"] = "Clear file list"
-- Executables are not allowed
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T4167762413"] = "Executables are not allowed"
-- Attach file
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T572534842"] = "Attach file"
-- Select a file to attach
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T595772870"] = "Select a file to attach"

View File

@@ -30,6 +30,9 @@ public sealed class ContentImage : IContent, IImageSource
    /// <inheritdoc />
    public List<Source> Sources { get; set; } = [];

    /// <inheritdoc />
    public List<string> FileAttachments { get; set; } = [];

    /// <inheritdoc />
    public Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastUserPrompt, ChatThread? chatChatThread, CancellationToken token = default)
    {

View File

@@ -1,3 +1,4 @@
using System.Text;
using System.Text.Json.Serialization;

using AIStudio.Provider;

@@ -39,6 +40,9 @@ public sealed class ContentText : IContent
    /// <inheritdoc />
    public List<Source> Sources { get; set; } = [];

    /// <inheritdoc />
    public List<string> FileAttachments { get; set; } = [];

    /// <inheritdoc />
    public async Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastUserPrompt, ChatThread? chatThread, CancellationToken token = default)
@@ -139,9 +143,34 @@ public sealed class ContentText : IContent
        Text = this.Text,
        InitialRemoteWait = this.InitialRemoteWait,
        IsStreaming = this.IsStreaming,
        Sources = [..this.Sources],
        FileAttachments = [..this.FileAttachments],
    };

    #endregion

    /// <summary>
    /// Combines the text content with the content of all attached files so that
    /// the result can be sent to the provider as a single message.
    /// </summary>
    public async Task<string> PrepareContentForAI()
    {
        var sb = new StringBuilder();
        sb.AppendLine(this.Text);

        if(this.FileAttachments.Count > 0)
        {
            sb.AppendLine();
            sb.AppendLine("The following files are attached to this message:");
            foreach(var file in this.FileAttachments)
            {
                sb.AppendLine();
                sb.AppendLine("---------------------------------------");
                sb.AppendLine($"File path: {file}");
                sb.AppendLine("File content:");
                sb.AppendLine("````");
                sb.AppendLine(await Program.RUST_SERVICE.ReadArbitraryFileData(file, int.MaxValue));
                sb.AppendLine("````");
            }
        }

        return sb.ToString();
    }
    /// <summary>
    /// The text content.

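For reference, a minimal, self-contained sketch of the prompt layout that PrepareContentForAI builds. It is illustrative only: File.ReadAllTextAsync stands in for the Program.RUST_SERVICE.ReadArbitraryFileData call, and the path /tmp/notes.md is made up.

// Illustrative sketch only; assumes .NET implicit usings for List<>, File, and Console.
using System.Text;

var text = "Please summarize the attached notes.";
var fileAttachments = new List<string> { "/tmp/notes.md" }; // hypothetical path

var sb = new StringBuilder();
sb.AppendLine(text);

if (fileAttachments.Count > 0)
{
    sb.AppendLine();
    sb.AppendLine("The following files are attached to this message:");
    foreach (var file in fileAttachments)
    {
        sb.AppendLine();
        sb.AppendLine("---------------------------------------");
        sb.AppendLine($"File path: {file}");
        sb.AppendLine("File content:");
        sb.AppendLine("````");
        sb.AppendLine(await File.ReadAllTextAsync(file)); // stand-in for the Rust service call
        sb.AppendLine("````");
    }
}

Console.WriteLine(sb.ToString());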
View File

@@ -47,6 +47,13 @@ public interface IContent
    /// </remarks>
    [JsonIgnore]
    public List<Source> Sources { get; set; }

    /// <summary>
    /// Represents a collection of file attachments associated with the content.
    /// This property contains a list of file paths that are appended
    /// to the content to provide additional context or resources.
    /// </summary>
    public List<string> FileAttachments { get; set; }

    /// <summary>
    /// Uses the provider to create the content.

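A short usage sketch (the path is illustrative): callers only store file paths in this property; the file content is read later, when the prompt is built, as shown in ContentText.PrepareContentForAI above and in the chat component further below.

var userMessage = new ContentText
{
    Text = "What does the attached log say?",
    FileAttachments = ["/tmp/app.log"], // hypothetical path; only the path is stored here
};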
View File

@@ -0,0 +1,25 @@
namespace AIStudio.Chat;

public static class ListContentBlockExtensions
{
    /// <summary>
    /// Processes a list of content blocks by transforming them into a collection of message results asynchronously.
    /// </summary>
    /// <param name="blocks">The list of content blocks to process.</param>
    /// <param name="transformer">A function that transforms each content block into a message result asynchronously.</param>
    /// <typeparam name="TResult">The type of the result produced by the transformation function.</typeparam>
    /// <returns>An asynchronous task that resolves to a list of transformed results.</returns>
    public static async Task<IList<TResult>> BuildMessages<TResult>(this List<ContentBlock> blocks, Func<ContentBlock, Task<TResult>> transformer)
    {
        // Start the transformation for all non-empty text blocks:
        var messages = blocks
            .Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text))
            .Select(transformer)
            .ToList();

        // Await all messages:
        await Task.WhenAll(messages);

        // Select all results; materializing the query is required because casting
        // the lazy Select() iterator to IList<TResult> would fail at runtime:
        return messages.Select(n => n.Result).ToList();
    }
}

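All provider changes below follow the same calling pattern for this extension; condensed here once, where Message stands for the respective provider's own request DTO:

// Shared pattern used by the providers below; the transformer awaits
// PrepareContentForAI() so that file attachments are inlined into the message text:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
    Role = n.Role switch
    {
        ChatRole.USER => "user",
        ChatRole.AI => "assistant",
        ChatRole.AGENT => "assistant",
        ChatRole.SYSTEM => "system",
        _ => "user",
    },
    Content = n.Content switch
    {
        ContentText text => await text.PrepareContentForAI(),
        _ => string.Empty,
    }
});

// The result is then spliced into the request, e.g.:
// Messages = [systemPrompt, ..messages],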
View File

@@ -1,20 +1,52 @@
@inherits MSGComponentBase

-<div @onmouseenter="@this.OnMouseEnter" @onmouseleave="@this.OnMouseLeave">
-    <MudLink OnClick="@(() => this.AddFilesManually())" Style="text-decoration: none;">
-        <MudPaper Height="20em" Outlined="true" Class="@this.dragClass" Style="overflow-y: auto;">
-            <MudText Typo="Typo.h6">
-                @T("Drag and drop files here or click to attach documents.")
-            </MudText>
-            @foreach (var fileInfo in this.DocumentPaths.Select(file => new FileInfo(file)))
-            {
-                <MudBadge Origin="Origin.TopCenter" Icon="@Icons.Material.Filled.Search" Color="Color.Primary" Overlap="true" Bordered="true" OnClick="@(() => this.InvestigateFile(@fileInfo))">
-                    <MudChip T="string" Color="Color.Dark" Text="@fileInfo.Name" tabindex="-1" Icon="@Icons.Material.Filled.Search" OnClick="@(() => this.InvestigateFile(@fileInfo))" OnClose="@(() => this.RemoveDocumentPathFromDocumentPaths(@fileInfo))"/>
-                </MudBadge>
-            }
-        </MudPaper>
-    </MudLink>
-    <MudButton OnClick="@(async () => await this.ClearAllFiles())" Variant="Variant.Filled" Color="Color.Info" Class="mt-2" StartIcon="@Icons.Material.Filled.Delete">
-        @T("Clear file list")
-    </MudButton>
-</div>
+@if (this.UseSmallForm)
+{
+    <div @onmouseenter="@this.OnMouseEnter" @onmouseleave="@this.OnMouseLeave">
+        @{
+            var fileInfos = this.DocumentPaths.Select(file => new FileInfo(file)).ToList();
+        }
+        @if (fileInfos.Any())
+        {
+            <MudBadge
+                Content="@this.DocumentPaths.Count"
+                Color="Color.Primary"
+                Overlap="true">
+                <MudIconButton
+                    Icon="@Icons.Material.Filled.AttachFile"
+                    Color="Color.Default"
+                    OnClick="@AddFilesManually"/>
+            </MudBadge>
+        }
+        else
+        {
+            <MudTooltip Text="@T("Click to attach files")" Placement="@TOOLBAR_TOOLTIP_PLACEMENT">
+                <MudIconButton
+                    Icon="@Icons.Material.Filled.AttachFile"
+                    Color="Color.Default"
+                    OnClick="@AddFilesManually"/>
+            </MudTooltip>
+        }
+    </div>
+}
+else
+{
+    <div @onmouseenter="@this.OnMouseEnter" @onmouseleave="@this.OnMouseLeave">
+        <MudLink OnClick="@(() => this.AddFilesManually())" Style="text-decoration: none;">
+            <MudPaper Height="20em" Outlined="true" Class="@this.dragClass" Style="overflow-y: auto;">
+                <MudText Typo="Typo.h6">
+                    @T("Drag and drop files here or click to attach documents.")
+                </MudText>
+                @foreach (var fileInfo in this.DocumentPaths.Select(file => new FileInfo(file)))
+                {
+                    <MudBadge Origin="Origin.TopCenter" Icon="@Icons.Material.Filled.Search" Color="Color.Primary" Overlap="true" Bordered="true" OnClick="@(() => this.InvestigateFile(@fileInfo))">
+                        <MudChip T="string" Color="Color.Dark" Text="@fileInfo.Name" tabindex="-1" Icon="@Icons.Material.Filled.Search" OnClick="@(() => this.InvestigateFile(@fileInfo))" OnClose="@(() => this.RemoveDocumentPathFromDocumentPaths(@fileInfo))"/>
+                    </MudBadge>
+                }
+            </MudPaper>
+        </MudLink>
+        <MudButton OnClick="@(async () => await this.ClearAllFiles())" Variant="Variant.Filled" Color="Color.Info" Class="mt-2" StartIcon="@Icons.Material.Filled.Delete">
+            @T("Clear file list")
+        </MudButton>
+    </div>
+}

View File

@@ -28,6 +28,9 @@ public partial class AttachDocuments : MSGComponentBase
    [Parameter]
    public bool CatchAllDocuments { get; set; }

    [Parameter]
    public bool UseSmallForm { get; set; }

    [Inject]
    private ILogger<AttachDocuments> Logger { get; set; } = null!;

@@ -36,6 +39,7 @@ public partial class AttachDocuments : MSGComponentBase
    [Inject]
    private IDialogService DialogService { get; init; } = null!;

    private const Placement TOOLBAR_TOOLTIP_PLACEMENT = Placement.Top;

    #region Overrides of MSGComponentBase

View File

@@ -83,6 +83,8 @@
<ChatTemplateSelection CanChatThreadBeUsedForTemplate="@this.CanThreadBeSaved" CurrentChatThread="@this.ChatThread" CurrentChatTemplate="@this.currentChatTemplate" CurrentChatTemplateChanged="@this.ChatTemplateWasChanged"/>
<AttachDocuments Name="Document Files Drop" @bind-DocumentPaths="@this.chatDocumentPaths" CatchAllDocuments="true" UseSmallForm="true"/>
@if (this.SettingsManager.ConfigurationData.Workspace.StorageBehavior is WorkspaceStorageBehavior.STORE_CHATS_AUTOMATICALLY)
{
    <MudTooltip Text="@T("Delete this chat & start a new one.")" Placement="@TOOLBAR_TOOLTIP_PLACEMENT">

View File

@@ -57,6 +57,8 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
    private string currentWorkspaceName = string.Empty;
    private Guid currentWorkspaceId = Guid.Empty;
    private CancellationTokenSource? cancellationTokenSource;
    private List<string> fileAttachments = new();
    private HashSet<string> chatDocumentPaths = [];

    // Unfortunately, we need the input field reference to blur the focus away. Without
    // this, we cannot clear the input field.

@@ -462,6 +464,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
        lastUserPrompt = new ContentText
        {
            Text = this.userInput,
            FileAttachments = this.fileAttachments.ToList(), // Create a copy
        };

        //

@@ -507,6 +510,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
        // Clear the input field:
        await this.inputField.FocusAsync();
        this.userInput = string.Empty;
        this.fileAttachments.Clear();
        await this.inputField.BlurAsync();

        // Enable the stream state for the chat component:

@@ -575,6 +579,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
        this.hasUnsavedChanges = false;
    }

    private async Task StartNewChat(bool useSameWorkspace = false, bool deletePreviousChat = false)
    {
        //

View File

@@ -23,7 +23,7 @@ public abstract class SettingsDialogBase : MSGComponentBase
    protected readonly List<ConfigurationSelectData<string>> availableEmbeddingProviders = new();

    #region Overrides of ComponentBase

    /// <inheritdoc />
    protected override async Task OnInitializedAsync()
    {

View File

@@ -13,7 +13,7 @@ public partial class SettingsDialogChatTemplate : SettingsDialogBase
    public ChatThread? ExistingChatThread { get; set; }

    #region Overrides of ComponentBase

    /// <inheritdoc />
    protected override async Task OnInitializedAsync()
    {

View File

@@ -39,6 +39,26 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the AlibabaCloud HTTP chat request:
var alibabaCloudChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -47,24 +67,8 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

View File

@@ -30,29 +30,32 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters("system");
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the Anthropic HTTP chat request:
var chatRequest = JsonSerializer.Serialize(new ChatRequest
{
Model = chatModel.Id,

// Build the messages:
-Messages = [..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [..messages],
System = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
MaxTokens = apiParameters.TryGetValue("max_tokens", out var value) && value is int intValue ? intValue : 4_096,

View File

@@ -39,6 +39,26 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the DeepSeek HTTP chat request:
var deepSeekChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -47,24 +67,8 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

View File

@@ -39,6 +39,26 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the Fireworks HTTP chat request:
var fireworksChatRequest = JsonSerializer.Serialize(new ChatRequest
{

@@ -47,24 +67,7 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,

View File

@@ -39,6 +39,26 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the GWDG HTTP chat request:
var gwdgChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -47,24 +67,8 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

View File

@@ -39,6 +39,26 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the Google HTTP chat request:
var geminiChatRequest = JsonSerializer.Serialize(new ChatRequest
{

@@ -47,24 +67,7 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,

View File

@@ -39,6 +39,26 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the OpenAI HTTP chat request:
var groqChatRequest = JsonSerializer.Serialize(new ChatRequest
{

@@ -47,24 +67,7 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,

View File

@@ -38,6 +38,26 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the Helmholtz HTTP chat request:
var helmholtzChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest

@@ -47,24 +67,8 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

View File

@@ -44,6 +44,26 @@ public sealed class ProviderHuggingFace : BaseProvider
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var message = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the HuggingFace HTTP chat request:
var huggingfaceChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -52,24 +72,8 @@ public sealed class ProviderHuggingFace : BaseProvider
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..message],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

View File

@@ -36,6 +36,26 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new RegularMessage
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the Mistral HTTP chat request:
var mistralChatRequest = JsonSerializer.Serialize(new ChatRequest

@@ -45,24 +65,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new RegularMessage
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,

@@ -70,6 +73,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

async Task<HttpRequestMessage> RequestBuilder()
{
    // Build the HTTP post request:

View File

@@ -88,6 +88,26 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters("input", "store", "tools");
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => systemPromptRole,
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
//
// Create the request: either for the Responses API or the Chat Completion API

@@ -102,24 +122,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => systemPromptRole,
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,

View File

@@ -48,6 +48,26 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the Perplexity HTTP chat request:
var perplexityChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -56,24 +76,7 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

View File

@@ -35,6 +35,26 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the OpenAI HTTP chat request:
var providerChatRequest = JsonSerializer.Serialize(new ChatRequest
{

@@ -43,24 +63,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,

View File

@@ -39,6 +39,26 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the xAI HTTP chat request:
var xChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -47,24 +67,7 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
-Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
-{
-    Role = n.Role switch
-    {
-        ChatRole.USER => "user",
-        ChatRole.AI => "assistant",
-        ChatRole.AGENT => "assistant",
-        ChatRole.SYSTEM => "system",
-        _ => "user",
-    },
-    Content = n.Content switch
-    {
-        ContentText text => text.Text,
-        _ => string.Empty,
-    }
-}).ToList()],
+Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,