Added the possibility to attach files to the chat (#585)
Some checks are pending
Build and Release / Read metadata (push) Waiting to run
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-apple-darwin, osx-arm64, macos-latest, aarch64-apple-darwin, dmg updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-pc-windows-msvc.exe, win-arm64, windows-latest, aarch64-pc-windows-msvc, nsis updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-unknown-linux-gnu, linux-arm64, ubuntu-22.04-arm, aarch64-unknown-linux-gnu, appimage deb updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-apple-darwin, osx-x64, macos-latest, x86_64-apple-darwin, dmg updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-pc-windows-msvc.exe, win-x64, windows-latest, x86_64-pc-windows-msvc, nsis updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-unknown-linux-gnu, linux-x64, ubuntu-22.04, x86_64-unknown-linux-gnu, appimage deb updater) (push) Blocked by required conditions
Build and Release / Prepare & create release (push) Blocked by required conditions
Build and Release / Publish release (push) Blocked by required conditions

Co-authored-by: Thorsten Sommer
This commit is contained in:
Sabrina-devops 2025-12-10 13:48:13 +01:00 committed by GitHub
parent 0aff45eca3
commit f521c11a60
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
33 changed files with 579 additions and 280 deletions

View File

@ -103,7 +103,7 @@ else
@T("Documents for the analysis") @T("Documents for the analysis")
</MudText> </MudText>
<AttachDocuments Name="Document Analysis Files Drop" @bind-DocumentPaths="@this.loadedDocumentPaths" CatchAllDocuments="true"/> <AttachDocuments Name="Document Analysis Files" @bind-DocumentPaths="@this.loadedDocumentPaths" CatchAllDocuments="true" UseSmallForm="false"/>
</ExpansionPanel> </ExpansionPanel>
</MudExpansionPanels> </MudExpansionPanels>

View File

@ -1468,6 +1468,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2093355991"] = "Removes
-- Regenerate Message -- Regenerate Message
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2308444540"] = "Regenerate Message" UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2308444540"] = "Regenerate Message"
-- Number of attachments
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T3018847255"] = "Number of attachments"
-- Cannot render content of type {0} yet. -- Cannot render content of type {0} yet.
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T3175548294"] = "Cannot render content of type {0} yet." UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T3175548294"] = "Cannot render content of type {0} yet."
@ -1504,6 +1507,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T2928927510"] = "Videos
-- Images are not supported yet -- Images are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T298062956"] = "Images are not supported yet" UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T298062956"] = "Images are not supported yet"
-- Click to attach files
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3521845090"] = "Click to attach files"
-- Clear file list -- Clear file list
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3759696136"] = "Clear file list" UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3759696136"] = "Clear file list"

View File

@ -16,11 +16,19 @@
</MudText> </MudText>
</CardHeaderContent> </CardHeaderContent>
<CardHeaderActions> <CardHeaderActions>
@if (this.Content.FileAttachments.Count > 0)
{
<MudTooltip Text="@T("Number of attachments")" Placement="Placement.Bottom">
<MudBadge Content="@this.Content.FileAttachments.Count" Color="Color.Primary" Overlap="true" BadgeClass="sources-card-header">
<MudIconButton Icon="@Icons.Material.Filled.AttachFile" />
</MudBadge>
</MudTooltip>
}
@if (this.Content.Sources.Count > 0) @if (this.Content.Sources.Count > 0)
{ {
<MudTooltip Text="@T("Number of sources")" Placement="Placement.Bottom"> <MudTooltip Text="@T("Number of sources")" Placement="Placement.Bottom">
<MudBadge Content="@this.Content.Sources.Count" Color="Color.Primary" Overlap="true" BadgeClass="sources-card-header"> <MudBadge Content="@this.Content.Sources.Count" Color="Color.Primary" Overlap="true" BadgeClass="sources-card-header">
<MudIconButton Icon="@Icons.Material.Filled.Link" /> <MudIconButton Icon="@Icons.Material.Filled.Link"/>
</MudBadge> </MudBadge>
</MudTooltip> </MudTooltip>
} }

View File

@ -30,6 +30,9 @@ public sealed class ContentImage : IContent, IImageSource
/// <inheritdoc /> /// <inheritdoc />
public List<Source> Sources { get; set; } = []; public List<Source> Sources { get; set; } = [];
/// <inheritdoc />
public List<string> FileAttachments { get; set; } = [];
/// <inheritdoc /> /// <inheritdoc />
public Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastUserPrompt, ChatThread? chatChatThread, CancellationToken token = default) public Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastUserPrompt, ChatThread? chatChatThread, CancellationToken token = default)
{ {

View File

@ -1,3 +1,4 @@
using System.Text;
using System.Text.Json.Serialization; using System.Text.Json.Serialization;
using AIStudio.Provider; using AIStudio.Provider;
@ -40,6 +41,9 @@ public sealed class ContentText : IContent
/// <inheritdoc /> /// <inheritdoc />
public List<Source> Sources { get; set; } = []; public List<Source> Sources { get; set; } = [];
/// <inheritdoc />
public List<string> FileAttachments { get; set; } = [];
/// <inheritdoc /> /// <inheritdoc />
public async Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastUserPrompt, ChatThread? chatThread, CancellationToken token = default) public async Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastUserPrompt, ChatThread? chatThread, CancellationToken token = default)
{ {
@ -139,10 +143,45 @@ public sealed class ContentText : IContent
Text = this.Text, Text = this.Text,
InitialRemoteWait = this.InitialRemoteWait, InitialRemoteWait = this.InitialRemoteWait,
IsStreaming = this.IsStreaming, IsStreaming = this.IsStreaming,
Sources = [..this.Sources],
FileAttachments = [..this.FileAttachments],
}; };
#endregion #endregion
/// <summary>
/// Builds the final text that is sent to the AI provider: the message text,
/// optionally followed by the content of every attached file.
/// </summary>
/// <remarks>
/// Attachments are only expanded when Pandoc is available and its version
/// check succeeded; otherwise a warning is logged and only the plain text
/// is returned. NOTE(review): assumes RUST_SERVICE.ReadArbitraryFileData's
/// second argument is a length limit, with int.MaxValue meaning "no limit"
/// — confirm against the Rust service API.
/// </remarks>
/// <returns>The combined text content for the AI request.</returns>
public async Task<string> PrepareContentForAI()
{
    var buffer = new StringBuilder();
    buffer.AppendLine(this.Text);

    // No attachments? Then the plain text is all we have to send:
    if (this.FileAttachments.Count == 0)
        return buffer.ToString();

    // Check Pandoc availability once before processing any attachment:
    var pandocState = await Pandoc.CheckAvailabilityAsync(Program.RUST_SERVICE, showMessages: true, showSuccessMessage: false);
    if (!pandocState.IsAvailable)
    {
        LOGGER.LogWarning("File attachments could not be processed because Pandoc is not available.");
        return buffer.ToString();
    }

    if (!pandocState.CheckWasSuccessful)
    {
        LOGGER.LogWarning("File attachments could not be processed because the Pandoc version check failed.");
        return buffer.ToString();
    }

    buffer.AppendLine();
    buffer.AppendLine("The following files are attached to this message:");
    foreach (var attachedFilePath in this.FileAttachments)
    {
        // Each attachment is delimited and fenced so the model can tell
        // file content apart from the surrounding message:
        buffer.AppendLine();
        buffer.AppendLine("---------------------------------------");
        buffer.AppendLine($"File path: {attachedFilePath}");
        buffer.AppendLine("File content:");
        buffer.AppendLine("````");
        buffer.AppendLine(await Program.RUST_SERVICE.ReadArbitraryFileData(attachedFilePath, int.MaxValue));
        buffer.AppendLine("````");
    }

    return buffer.ToString();
}
/// <summary> /// <summary>
/// The text content. /// The text content.
/// </summary> /// </summary>

View File

@ -48,6 +48,13 @@ public interface IContent
[JsonIgnore] [JsonIgnore]
public List<Source> Sources { get; set; } public List<Source> Sources { get; set; }
/// <summary>
/// Represents a collection of file attachments associated with the content.
/// This property contains a list of file paths that are appended
/// to the content to provide additional context or resources.
/// </summary>
public List<string> FileAttachments { get; set; }
/// <summary> /// <summary>
/// Uses the provider to create the content. /// Uses the provider to create the content.
/// </summary> /// </summary>

View File

@ -0,0 +1,25 @@
namespace AIStudio.Chat;

public static class ListContentBlockExtensions
{
    /// <summary>
    /// Processes a list of content blocks by transforming them into a collection of message results asynchronously.
    /// </summary>
    /// <remarks>
    /// Only text blocks with non-empty content are transformed; all other blocks are skipped.
    /// The transformation tasks start eagerly and run concurrently; result order matches block order.
    /// </remarks>
    /// <param name="blocks">The list of content blocks to process.</param>
    /// <param name="transformer">A function that transforms each content block into a message result asynchronously.</param>
    /// <typeparam name="TResult">The type of the result produced by the transformation function.</typeparam>
    /// <returns>An asynchronous task that resolves to a list of transformed results.</returns>
    public static async Task<IList<TResult>> BuildMessages<TResult>(this List<ContentBlock> blocks, Func<ContentBlock, Task<TResult>> transformer)
    {
        var messageTasks = blocks
            .Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text))
            .Select(transformer)
            .ToList();

        // Task.WhenAll already yields the ordered results as TResult[],
        // which implements IList<TResult> — no need to re-read Task.Result:
        return await Task.WhenAll(messageTasks);
    }
}

View File

@ -1,27 +1,59 @@
@inherits MSGComponentBase @inherits MSGComponentBase
<MudStack Row="true" AlignItems="AlignItems.Center" StretchItems="StretchItems.None" Wrap="Wrap.Wrap"> @if (this.UseSmallForm)
{
<div @onmouseenter="@this.OnMouseEnter" @onmouseleave="@this.OnMouseLeave">
@{
var fileInfos = this.DocumentPaths.Select(file => new FileInfo(file)).ToList();
}
@if (fileInfos.Any())
{
<MudBadge
Content="@this.DocumentPaths.Count"
Color="Color.Primary"
Overlap="true">
<MudIconButton
Icon="@Icons.Material.Filled.AttachFile"
Color="Color.Default"
OnClick="@AddFilesManually"/>
</MudBadge>
}
else
{
<MudTooltip Text="@T("Click to attach files")" Placement="@TOOLBAR_TOOLTIP_PLACEMENT">
<MudIconButton
Icon="@Icons.Material.Filled.AttachFile"
Color="Color.Default"
OnClick="@AddFilesManually"/>
</MudTooltip>
}
</div>
}
else
{
<MudStack Row="true" AlignItems="AlignItems.Center" StretchItems="StretchItems.None" Wrap="Wrap.Wrap">
<MudText Typo="Typo.body1" Inline="true"> <MudText Typo="Typo.body1" Inline="true">
@T("Drag and drop files into the marked area or click here to attach documents: ") @T("Drag and drop files into the marked area or click here to attach documents: ")
</MudText> </MudText>
<MudButton <MudButton
Variant="Variant.Filled" Variant="Variant.Filled"
Icon="@Icons.Material.Filled.Add" StartIcon="@Icons.Material.Filled.Add"
Color="Color.Primary" Color="Color.Primary"
OnClick="@(() => this.AddFilesManually())" OnClick="@(() => this.AddFilesManually())"
Style="vertical-align: top; margin-top: -2px;" Style="vertical-align: top; margin-top: -2px;"
Size="Size.Small" Size="Size.Small">
T>@T("Add file") @T("Add file")
</MudButton> </MudButton>
</MudStack> </MudStack>
<div @onmouseenter="@this.OnMouseEnter" @onmouseleave="@this.OnMouseLeave"> <div @onmouseenter="@this.OnMouseEnter" @onmouseleave="@this.OnMouseLeave">
<MudPaper Height="20em" Outlined="true" Class="@this.dragClass" Style="overflow-y: auto;"> <MudPaper Height="20em" Outlined="true" Class="@this.dragClass" Style="overflow-y: auto;">
@foreach (var fileInfo in this.DocumentPaths.Select(file => new FileInfo(file))) @foreach (var fileInfo in this.DocumentPaths.Select(file => new FileInfo(file)))
{ {
<MudChip T="string" Color="Color.Dark" Text="@fileInfo.Name" tabindex="-1" Icon="@Icons.Material.Filled.Search" OnClick="@(() => this.InvestigateFile(@fileInfo))" OnClose="@(() => this.RemoveDocumentPathFromDocumentPaths(@fileInfo))"/> <MudChip T="string" Color="Color.Dark" Text="@fileInfo.Name" tabindex="-1" Icon="@Icons.Material.Filled.Search" OnClick="@(() => this.InvestigateFile(@fileInfo))" OnClose="@(() => this.RemoveDocumentPathFromDocumentPaths(@fileInfo))"/>
} }
</MudPaper> </MudPaper>
</div> </div>
<MudButton OnClick="@(async () => await this.ClearAllFiles())" Variant="Variant.Filled" Color="Color.Info" Class="mt-2" StartIcon="@Icons.Material.Filled.Delete"> <MudButton OnClick="@(async () => await this.ClearAllFiles())" Variant="Variant.Filled" Color="Color.Info" Class="mt-2" StartIcon="@Icons.Material.Filled.Delete">
@T("Clear file list") @T("Clear file list")
</MudButton> </MudButton>
}

View File

@ -28,6 +28,9 @@ public partial class AttachDocuments : MSGComponentBase
[Parameter] [Parameter]
public bool CatchAllDocuments { get; set; } public bool CatchAllDocuments { get; set; }
[Parameter]
public bool UseSmallForm { get; set; }
[Inject] [Inject]
private ILogger<AttachDocuments> Logger { get; set; } = null!; private ILogger<AttachDocuments> Logger { get; set; } = null!;
@ -37,6 +40,8 @@ public partial class AttachDocuments : MSGComponentBase
[Inject] [Inject]
private IDialogService DialogService { get; init; } = null!; private IDialogService DialogService { get; init; } = null!;
private const Placement TOOLBAR_TOOLTIP_PLACEMENT = Placement.Top;
#region Overrides of MSGComponentBase #region Overrides of MSGComponentBase
protected override async Task OnInitializedAsync() protected override async Task OnInitializedAsync()

View File

@ -83,6 +83,11 @@
<ChatTemplateSelection CanChatThreadBeUsedForTemplate="@this.CanThreadBeSaved" CurrentChatThread="@this.ChatThread" CurrentChatTemplate="@this.currentChatTemplate" CurrentChatTemplateChanged="@this.ChatTemplateWasChanged"/> <ChatTemplateSelection CanChatThreadBeUsedForTemplate="@this.CanThreadBeSaved" CurrentChatThread="@this.ChatThread" CurrentChatTemplate="@this.currentChatTemplate" CurrentChatTemplateChanged="@this.ChatTemplateWasChanged"/>
@if (this.isPandocAvailable)
{
<AttachDocuments Name="File Attachments" @bind-DocumentPaths="@this.chatDocumentPaths" CatchAllDocuments="true" UseSmallForm="true"/>
}
@if (this.SettingsManager.ConfigurationData.Workspace.StorageBehavior is WorkspaceStorageBehavior.STORE_CHATS_AUTOMATICALLY) @if (this.SettingsManager.ConfigurationData.Workspace.StorageBehavior is WorkspaceStorageBehavior.STORE_CHATS_AUTOMATICALLY)
{ {
<MudTooltip Text="@T("Delete this chat & start a new one.")" Placement="@TOOLBAR_TOOLTIP_PLACEMENT"> <MudTooltip Text="@T("Delete this chat & start a new one.")" Placement="@TOOLBAR_TOOLTIP_PLACEMENT">

View File

@ -3,6 +3,7 @@ using AIStudio.Dialogs;
using AIStudio.Provider; using AIStudio.Provider;
using AIStudio.Settings; using AIStudio.Settings;
using AIStudio.Settings.DataModel; using AIStudio.Settings.DataModel;
using AIStudio.Tools.Services;
using Microsoft.AspNetCore.Components; using Microsoft.AspNetCore.Components;
using Microsoft.AspNetCore.Components.Web; using Microsoft.AspNetCore.Components.Web;
@ -37,6 +38,9 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
[Inject] [Inject]
private IDialogService DialogService { get; init; } = null!; private IDialogService DialogService { get; init; } = null!;
[Inject]
private PandocAvailabilityService PandocAvailabilityService { get; init; } = null!;
private const Placement TOOLBAR_TOOLTIP_PLACEMENT = Placement.Top; private const Placement TOOLBAR_TOOLTIP_PLACEMENT = Placement.Top;
private static readonly Dictionary<string, object?> USER_INPUT_ATTRIBUTES = new(); private static readonly Dictionary<string, object?> USER_INPUT_ATTRIBUTES = new();
@ -57,6 +61,8 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
private string currentWorkspaceName = string.Empty; private string currentWorkspaceName = string.Empty;
private Guid currentWorkspaceId = Guid.Empty; private Guid currentWorkspaceId = Guid.Empty;
private CancellationTokenSource? cancellationTokenSource; private CancellationTokenSource? cancellationTokenSource;
private HashSet<string> chatDocumentPaths = [];
private bool isPandocAvailable;
// Unfortunately, we need the input field reference to blur the focus away. Without // Unfortunately, we need the input field reference to blur the focus away. Without
// this, we cannot clear the input field. // this, we cannot clear the input field.
@ -197,6 +203,10 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
// Select the correct provider: // Select the correct provider:
await this.SelectProviderWhenLoadingChat(); await this.SelectProviderWhenLoadingChat();
// Check if Pandoc is available (no dialog or messages):
this.isPandocAvailable = await this.PandocAvailabilityService.IsAvailableAsync();
await base.OnInitializedAsync(); await base.OnInitializedAsync();
} }
@ -462,6 +472,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
lastUserPrompt = new ContentText lastUserPrompt = new ContentText
{ {
Text = this.userInput, Text = this.userInput,
FileAttachments = this.chatDocumentPaths.ToList(),
}; };
// //
@ -507,6 +518,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
// Clear the input field: // Clear the input field:
await this.inputField.FocusAsync(); await this.inputField.FocusAsync();
this.userInput = string.Empty; this.userInput = string.Empty;
this.chatDocumentPaths.Clear();
await this.inputField.BlurAsync(); await this.inputField.BlurAsync();
// Enable the stream state for the chat component: // Enable the stream state for the chat component:

View File

@ -25,6 +25,9 @@ public partial class ReadFileContent : MSGComponentBase
[Inject] [Inject]
private ILogger<ReadFileContent> Logger { get; init; } = null!; private ILogger<ReadFileContent> Logger { get; init; } = null!;
[Inject]
private PandocAvailabilityService PandocAvailabilityService { get; init; } = null!;
private async Task SelectFile() private async Task SelectFile()
{ {
var selectedFile = await this.RustService.SelectFile(T("Select file to read its content")); var selectedFile = await this.RustService.SelectFile(T("Select file to read its content"));
@ -62,6 +65,11 @@ public partial class ReadFileContent : MSGComponentBase
return; return;
} }
// Ensure that Pandoc is installed and ready:
await this.PandocAvailabilityService.EnsureAvailabilityAsync(
showSuccessMessage: false,
showDialog: true);
try try
{ {
var fileContent = await UserFile.LoadFileData(selectedFile.SelectedFilePath, this.RustService, this.DialogService); var fileContent = await UserFile.LoadFileData(selectedFile.SelectedFilePath, this.RustService, this.DialogService);

View File

@ -1470,6 +1470,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2093355991"] = "Entfern
-- Regenerate Message -- Regenerate Message
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2308444540"] = "Nachricht neu erstellen" UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2308444540"] = "Nachricht neu erstellen"
-- Number of attachments
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T3018847255"] = "Anzahl der Anhänge"
-- Cannot render content of type {0} yet. -- Cannot render content of type {0} yet.
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T3175548294"] = "Der Inhaltstyp {0} kann noch nicht angezeigt werden." UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T3175548294"] = "Der Inhaltstyp {0} kann noch nicht angezeigt werden."
@ -1506,6 +1509,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T2928927510"] = "Videos
-- Images are not supported yet -- Images are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T298062956"] = "Bilder werden noch nicht unterstützt." UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T298062956"] = "Bilder werden noch nicht unterstützt."
-- Click to attach files
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3521845090"] = "Klicken, um Dateien anzuhängen"
-- Clear file list -- Clear file list
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3759696136"] = "Dateiliste löschen" UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3759696136"] = "Dateiliste löschen"
@ -6032,3 +6038,4 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::WORKSPACEBEHAVIOUR::T1307384014"] = "Unbenannt
-- Delete Chat -- Delete Chat
UI_TEXT_CONTENT["AISTUDIO::TOOLS::WORKSPACEBEHAVIOUR::T2244038752"] = "Chat löschen" UI_TEXT_CONTENT["AISTUDIO::TOOLS::WORKSPACEBEHAVIOUR::T2244038752"] = "Chat löschen"

View File

@ -1470,6 +1470,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2093355991"] = "Removes
-- Regenerate Message -- Regenerate Message
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2308444540"] = "Regenerate Message" UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2308444540"] = "Regenerate Message"
-- Number of attachments
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T3018847255"] = "Number of attachments"
-- Cannot render content of type {0} yet. -- Cannot render content of type {0} yet.
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T3175548294"] = "Cannot render content of type {0} yet." UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T3175548294"] = "Cannot render content of type {0} yet."
@ -1506,6 +1509,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T2928927510"] = "Videos
-- Images are not supported yet -- Images are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T298062956"] = "Images are not supported yet" UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T298062956"] = "Images are not supported yet"
-- Click to attach files
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3521845090"] = "Click to attach files"
-- Clear file list -- Clear file list
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3759696136"] = "Clear file list" UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3759696136"] = "Clear file list"
@ -6032,3 +6038,4 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::WORKSPACEBEHAVIOUR::T1307384014"] = "Unnamed w
-- Delete Chat -- Delete Chat
UI_TEXT_CONTENT["AISTUDIO::TOOLS::WORKSPACEBEHAVIOUR::T2244038752"] = "Delete Chat" UI_TEXT_CONTENT["AISTUDIO::TOOLS::WORKSPACEBEHAVIOUR::T2244038752"] = "Delete Chat"

View File

@ -126,6 +126,7 @@ internal sealed class Program
builder.Services.AddSingleton<SettingsManager>(); builder.Services.AddSingleton<SettingsManager>();
builder.Services.AddSingleton<ThreadSafeRandom>(); builder.Services.AddSingleton<ThreadSafeRandom>();
builder.Services.AddSingleton<DataSourceService>(); builder.Services.AddSingleton<DataSourceService>();
builder.Services.AddScoped<PandocAvailabilityService>();
builder.Services.AddTransient<HTMLParser>(); builder.Services.AddTransient<HTMLParser>();
builder.Services.AddTransient<AgentDataSourceSelection>(); builder.Services.AddTransient<AgentDataSourceSelection>();
builder.Services.AddTransient<AgentRetrievalContextValidation>(); builder.Services.AddTransient<AgentRetrievalContextValidation>();

View File

@ -39,15 +39,8 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the AlibabaCloud HTTP chat request: // Build the list of messages:
var alibabaCloudChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -61,10 +54,21 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the AlibabaCloud HTTP chat request:
var alibabaCloudChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then non-empty user and AI messages
Messages = [systemPrompt, ..messages],
Stream = true, Stream = true,
AdditionalApiParameters = apiParameters AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS); }, JSON_SERIALIZER_OPTIONS);

View File

@ -30,13 +30,8 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters("system"); var apiParameters = this.ParseAdditionalApiParameters("system");
// Prepare the Anthropic HTTP chat request: // Build the list of messages:
var chatRequest = JsonSerializer.Serialize(new ChatRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
Messages = [..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -49,10 +44,18 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the Anthropic HTTP chat request:
var chatRequest = JsonSerializer.Serialize(new ChatRequest
{
Model = chatModel.Id,
// Build the messages:
Messages = [..messages],
System = chatThread.PrepareSystemPrompt(settingsManager, chatThread), System = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
MaxTokens = apiParameters.TryGetValue("max_tokens", out var value) && value is int intValue ? intValue : 4_096, MaxTokens = apiParameters.TryGetValue("max_tokens", out var value) && value is int intValue ? intValue : 4_096,

View File

@ -39,15 +39,8 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the DeepSeek HTTP chat request: // Build the list of messages:
var deepSeekChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -61,10 +54,21 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the DeepSeek HTTP chat request:
var deepSeekChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then non-empty user and AI messages
Messages = [systemPrompt, ..messages],
Stream = true, Stream = true,
AdditionalApiParameters = apiParameters AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS); }, JSON_SERIALIZER_OPTIONS);

View File

@ -39,15 +39,8 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Fireworks HTTP chat request: // Build the list of messages:
var fireworksChatRequest = JsonSerializer.Serialize(new ChatRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -61,10 +54,20 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the Fireworks HTTP chat request:
var fireworksChatRequest = JsonSerializer.Serialize(new ChatRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then non-empty user and AI messages
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions: // Right now, we only support streaming completions:
Stream = true, Stream = true,

View File

@ -39,15 +39,8 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the GWDG HTTP chat request: // Build the list of messages:
var gwdgChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -61,10 +54,21 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the GWDG HTTP chat request:
var gwdgChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then non-empty user and AI messages
Messages = [systemPrompt, ..messages],
Stream = true, Stream = true,
AdditionalApiParameters = apiParameters AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS); }, JSON_SERIALIZER_OPTIONS);

View File

@ -39,15 +39,8 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Google HTTP chat request: // Build the list of messages:
var geminiChatRequest = JsonSerializer.Serialize(new ChatRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -61,10 +54,20 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the Google HTTP chat request:
var geminiChatRequest = JsonSerializer.Serialize(new ChatRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions: // Right now, we only support streaming completions:
Stream = true, Stream = true,

View File

@ -39,15 +39,8 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the OpenAI HTTP chat request: // Build the list of messages:
var groqChatRequest = JsonSerializer.Serialize(new ChatRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -61,10 +54,20 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the OpenAI HTTP chat request:
var groqChatRequest = JsonSerializer.Serialize(new ChatRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions: // Right now, we only support streaming completions:
Stream = true, Stream = true,

View File

@ -39,15 +39,8 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Helmholtz HTTP chat request: // Build the list of messages:
var helmholtzChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -61,10 +54,21 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the Helmholtz HTTP chat request:
var helmholtzChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],
Stream = true, Stream = true,
AdditionalApiParameters = apiParameters AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS); }, JSON_SERIALIZER_OPTIONS);

View File

@ -44,15 +44,8 @@ public sealed class ProviderHuggingFace : BaseProvider
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the HuggingFace HTTP chat request: // Build the list of messages:
var huggingfaceChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest var message = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -66,10 +59,21 @@ public sealed class ProviderHuggingFace : BaseProvider
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the HuggingFace HTTP chat request:
var huggingfaceChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..message],
Stream = true, Stream = true,
AdditionalApiParameters = apiParameters AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS); }, JSON_SERIALIZER_OPTIONS);

View File

@ -37,15 +37,8 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Mistral HTTP chat request: // Build the list of messages:
var mistralChatRequest = JsonSerializer.Serialize(new ChatRequest var messages = await chatThread.Blocks.BuildMessages(async n => new RegularMessage
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new RegularMessage
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -59,10 +52,20 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the Mistral HTTP chat request:
var mistralChatRequest = JsonSerializer.Serialize(new ChatRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions: // Right now, we only support streaming completions:
Stream = true, Stream = true,
@ -70,6 +73,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
AdditionalApiParameters = apiParameters AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS); }, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder() async Task<HttpRequestMessage> RequestBuilder()
{ {
// Build the HTTP post request: // Build the HTTP post request:

View File

@ -89,20 +89,8 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters("input", "store", "tools"); var apiParameters = this.ParseAdditionalApiParameters("input", "store", "tools");
// // Build the list of messages:
// Create the request: either for the Responses API or the Chat Completion API var messages = await chatThread.Blocks.BuildMessages(async n => new Message
//
var openAIChatRequest = usingResponsesAPI switch
{
// Chat Completion API request:
false => JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -116,10 +104,25 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
//
// Create the request: either for the Responses API or the Chat Completion API
//
var openAIChatRequest = usingResponsesAPI switch
{
// Chat Completion API request:
false => JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions: // Right now, we only support streaming completions:
Stream = true, Stream = true,

View File

@ -48,15 +48,8 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Perplexity HTTP chat request: // Build the list of messages:
var perplexityChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -70,10 +63,20 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the Perplexity HTTP chat request:
var perplexityChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],
Stream = true, Stream = true,
AdditionalApiParameters = apiParameters AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS); }, JSON_SERIALIZER_OPTIONS);

View File

@ -35,15 +35,8 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the OpenAI HTTP chat request: // Build the list of messages:
var providerChatRequest = JsonSerializer.Serialize(new ChatRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -57,10 +50,20 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the OpenAI HTTP chat request:
var providerChatRequest = JsonSerializer.Serialize(new ChatRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions: // Right now, we only support streaming completions:
Stream = true, Stream = true,

View File

@ -39,15 +39,8 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
// Parse the API parameters: // Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters(); var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the xAI HTTP chat request: // Build the list of messages:
var xChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{ {
Role = n.Role switch Role = n.Role switch
{ {
@ -61,10 +54,20 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
Content = n.Content switch Content = n.Content switch
{ {
ContentText text => text.Text, ContentText text => await text.PrepareContentForAI(),
_ => string.Empty, _ => string.Empty,
} }
}).ToList()], });
// Prepare the xAI HTTP chat request:
var xChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions: // Right now, we only support streaming completions:
Stream = true, Stream = true,

View File

@ -0,0 +1,85 @@
using AIStudio.Dialogs;
using AIStudio.Tools.PluginSystem;
using DialogOptions = AIStudio.Dialogs.DialogOptions;
namespace AIStudio.Tools.Services;
/// <summary>
/// Service to check Pandoc availability and ensure installation.
/// This service encapsulates the logic for checking if Pandoc is installed
/// and showing the installation dialog if needed.
/// </summary>
public sealed class PandocAvailabilityService(RustService rustService, IDialogService dialogService, ILogger<PandocAvailabilityService> logger)
{
    private static string TB(string fallbackEN) => I18N.I.T(fallbackEN, typeof(PandocAvailabilityService).Namespace, nameof(PandocAvailabilityService));

    // Result of the most recent availability check; null until the first
    // check ran (or after ClearCache() was called).
    private PandocInstallation? cachedInstallation;

    /// <summary>
    /// Checks if Pandoc is available and shows the installation dialog if needed.
    /// </summary>
    /// <param name="showSuccessMessage">Whether to show a success message if Pandoc is available.</param>
    /// <param name="showDialog">Whether to show the installation dialog if Pandoc is not available.</param>
    /// <returns>The Pandoc installation state.</returns>
    public async Task<PandocInstallation> EnsureAvailabilityAsync(bool showSuccessMessage = false, bool showDialog = true)
    {
        // First pass: determine the current installation state and remember it:
        var installation = await Pandoc.CheckAvailabilityAsync(rustService, showMessages: false, showSuccessMessage: showSuccessMessage);
        this.cachedInstallation = installation;

        // Nothing more to do when Pandoc is present or the caller opted out of the dialog:
        if (installation.IsAvailable || !showDialog)
            return installation;

        // Offer the installation dialog to the user:
        var dialogParameters = new DialogParameters<PandocDialog>
        {
            { x => x.ShowInitialResultInSnackbar, false },
        };

        var dialogReference = await dialogService.ShowAsync<PandocDialog>(TB("Pandoc Installation"), dialogParameters, DialogOptions.FULLSCREEN);
        await dialogReference.Result;

        // Second pass: check again now that the dialog was closed:
        installation = await Pandoc.CheckAvailabilityAsync(rustService, showMessages: showSuccessMessage, showSuccessMessage: showSuccessMessage);
        this.cachedInstallation = installation;

        if (!installation.IsAvailable)
        {
            logger.LogError("Pandoc is not available after installation attempt.");
            await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Cancel, TB("Pandoc may be required for importing files.")));
        }

        return installation;
    }

    /// <summary>
    /// Checks if Pandoc is available without showing any dialogs or messages.
    /// Uses cached result if available to avoid redundant checks.
    /// </summary>
    /// <returns>True if Pandoc is available, false otherwise.</returns>
    public async Task<bool> IsAvailableAsync()
    {
        // Serve from the cache when a previous check already ran:
        if (this.cachedInstallation is { } knownState)
            return knownState.IsAvailable;

        var installation = await Pandoc.CheckAvailabilityAsync(rustService, showMessages: false, showSuccessMessage: false);
        this.cachedInstallation = installation;
        return installation.IsAvailable;
    }

    /// <summary>
    /// Clears the cached Pandoc installation state.
    /// Useful when the installation state might have changed.
    /// </summary>
    public void ClearCache() => this.cachedInstallation = null;
}

View File

@ -1,6 +1,7 @@
# v0.9.55, build 230 (2025-12-xx xx:xx UTC) # v0.9.55, build 230 (2025-12-xx xx:xx UTC)
- Added support for newer Mistral models (Mistral 3, Voxtral, and Magistral). - Added support for newer Mistral models (Mistral 3, Voxtral, and Magistral).
- Added a description field to local data sources (preview feature) so that the data selection agent has more information about which data each local source contains when selecting data sources. - Added a description field to local data sources (preview feature) so that the data selection agent has more information about which data each local source contains when selecting data sources.
- Added the ability to use file attachments in chat. This is the initial implementation of this feature. We will continue to develop this feature and refine it further based on user feedback. Many thanks to Sabrina `Sabrina-devops` for this wonderful contribution.
- Improved the document analysis assistant (in preview) by adding descriptions to the different sections. - Improved the document analysis assistant (in preview) by adding descriptions to the different sections.
- Improved the document preview dialog for the document analysis assistant (in preview), providing Markdown and plain text views for attached files. - Improved the document preview dialog for the document analysis assistant (in preview), providing Markdown and plain text views for attached files.
- Improved the ID handling for configuration plugins. - Improved the ID handling for configuration plugins.