Mirror of https://github.com/MindWorkAI/AI-Studio.git (synced 2026-02-13 21:21:36 +00:00)

Commit 2ecc883cc8 (parent 311092ec5e): Add changes to the provider and add the AttachFile button
@@ -91,7 +91,7 @@ else
@T("Documents for the analysis")
</MudText>

<AttachDocuments Name="Document Analysis Files Drop" @bind-DocumentPaths="@this.loadedDocumentPaths" CatchAllDocuments="true"/>
<AttachDocuments Name="Document Analysis Files Drop" @bind-DocumentPaths="@this.loadedDocumentPaths" CatchAllDocuments="true" UseSmallForm="false"/>

</ExpansionPanel>
</MudExpansionPanels>
@@ -1495,12 +1495,18 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T2928927510"] = "Videos
-- Images are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T298062956"] = "Images are not supported yet"

-- Click to attach files
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3521845090"] = "Click to attach files"

-- Clear file list
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T3759696136"] = "Clear file list"

-- Executables are not allowed
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T4167762413"] = "Executables are not allowed"

-- Attach file
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T572534842"] = "Attach file"

-- Select a file to attach
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ATTACHDOCUMENTS::T595772870"] = "Select a file to attach"
@@ -30,6 +30,9 @@ public sealed class ContentImage : IContent, IImageSource
/// <inheritdoc />
public List<Source> Sources { get; set; } = [];

/// <inheritdoc />
public List<string> FileAttachments { get; set; } = [];

/// <inheritdoc />
public Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastUserPrompt, ChatThread? chatChatThread, CancellationToken token = default)
{
@@ -1,3 +1,4 @@
using System.Text;
using System.Text.Json.Serialization;

using AIStudio.Provider;

@@ -39,6 +40,9 @@ public sealed class ContentText : IContent

/// <inheritdoc />
public List<Source> Sources { get; set; } = [];

/// <inheritdoc />
public List<string> FileAttachments { get; set; } = [];

/// <inheritdoc />
public async Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastUserPrompt, ChatThread? chatThread, CancellationToken token = default)
@@ -139,9 +143,34 @@ public sealed class ContentText : IContent
Text = this.Text,
InitialRemoteWait = this.InitialRemoteWait,
IsStreaming = this.IsStreaming,
Sources = [..this.Sources],
FileAttachments = [..this.FileAttachments],
};

#endregion

public async Task<string> PrepareContentForAI()
{
var sb = new StringBuilder();
sb.AppendLine(this.Text);

if(this.FileAttachments.Count > 0)
{
sb.AppendLine();
sb.AppendLine("The following files are attached to this message:");
foreach(var file in this.FileAttachments)
{
sb.AppendLine();
sb.AppendLine("---------------------------------------");
sb.AppendLine($"File path: {file}");
sb.AppendLine("File content:");
sb.AppendLine("````");
sb.AppendLine(await Program.RUST_SERVICE.ReadArbitraryFileData(file, int.MaxValue));
sb.AppendLine("````");
}
}
return sb.ToString();
}

/// <summary>
/// The text content.
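Editorial note (not part of the commit): PrepareContentForAI() is what the provider classes later call instead of reading Text directly. A minimal usage sketch, assuming the project's ContentText type and an enclosing async method; the prompt text and path are placeholders:

// Sketch: build a user message with one attachment and expand it for the AI.
var userMessage = new ContentText
{
    Text = "Please summarize the attached notes.",   // placeholder text
    FileAttachments = ["/tmp/notes.md"],             // hypothetical path
};

// Returns the original text plus, per attachment, its path and full content
// wrapped in ```` fences, as implemented in the method above:
var expandedPrompt = await userMessage.PrepareContentForAI();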
@@ -47,6 +47,13 @@ public interface IContent
/// </remarks>
[JsonIgnore]
public List<Source> Sources { get; set; }

/// <summary>
/// Represents a collection of file attachments associated with the content.
/// This property contains a list of file paths that are appended
/// to the content to provide additional context or resources.
/// </summary>
public List<string> FileAttachments { get; set; }

/// <summary>
/// Uses the provider to create the content.
app/MindWork AI Studio/Chat/ListContentBlockExtensions.cs (new file, 25 lines)
@@ -0,0 +1,25 @@
namespace AIStudio.Chat;

public static class ListContentBlockExtensions
{
    /// <summary>
    /// Processes a list of content blocks by transforming them into a collection of message results asynchronously.
    /// </summary>
    /// <param name="blocks">The list of content blocks to process.</param>
    /// <param name="transformer">A function that transforms each content block into a message result asynchronously.</param>
    /// <typeparam name="TResult">The type of the result produced by the transformation function.</typeparam>
    /// <returns>An asynchronous task that resolves to a list of transformed results.</returns>
    public static async Task<IList<TResult>> BuildMessages<TResult>(this List<ContentBlock> blocks, Func<ContentBlock, Task<TResult>> transformer)
    {
        var messages = blocks
            .Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text))
            .Select(transformer)
            .ToList();

        // Await all messages:
        await Task.WhenAll(messages);

        // Select all results:
        return (IList<TResult>)messages.Select(n => n.Result);
    }
}
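Editorial note (not part of the commit): Enumerable.Select returns a lazy IEnumerable<TResult>, not an IList<TResult>, so the cast in the method's final line will throw an InvalidCastException once a provider calls it. A minimal variant that materializes the results instead (hypothetical name, otherwise the same signature):

// Hedged sketch of a safer variant: await the transformer tasks, then
// materialize the results so the returned object really implements IList<TResult>.
public static async Task<IList<TResult>> BuildMessagesSafe<TResult>(
    this List<ContentBlock> blocks,
    Func<ContentBlock, Task<TResult>> transformer)
{
    var tasks = blocks
        .Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text))
        .Select(transformer)
        .ToList();

    await Task.WhenAll(tasks);

    // ToList() instead of a cast: the result of Select(...) alone is not an IList.
    return tasks.Select(t => t.Result).ToList();
}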
@@ -1,20 +1,52 @@
@inherits MSGComponentBase

<div @onmouseenter="@this.OnMouseEnter" @onmouseleave="@this.OnMouseLeave">
<MudLink OnClick="@(() => this.AddFilesManually())" Style="text-decoration: none;">
<MudPaper Height="20em" Outlined="true" Class="@this.dragClass" Style="overflow-y: auto;">
<MudText Typo="Typo.h6">
@T("Drag and drop files here or click to attach documents.")
</MudText>
@foreach (var fileInfo in this.DocumentPaths.Select(file => new FileInfo(file)))
@if (this.UseSmallForm)
{
<div @onmouseenter="@this.OnMouseEnter" @onmouseleave="@this.OnMouseLeave">
@{
var fileInfos = this.DocumentPaths.Select(file => new FileInfo(file)).ToList();
}
@if (fileInfos.Any())
{
<MudBadge Origin="Origin.TopCenter" Icon="@Icons.Material.Filled.Search" Color="Color.Primary" Overlap="true" Bordered="true" OnClick="@(() => this.InvestigateFile(@fileInfo))">
<MudChip T="string" Color="Color.Dark" Text="@fileInfo.Name" tabindex="-1" Icon="@Icons.Material.Filled.Search" OnClick="@(() => this.InvestigateFile(@fileInfo))" OnClose="@(() => this.RemoveDocumentPathFromDocumentPaths(@fileInfo))"/>
<MudBadge
Content="@this.DocumentPaths.Count"
Color="Color.Primary"
Overlap="true">
<MudIconButton
Icon="@Icons.Material.Filled.AttachFile"
Color="Color.Default"
OnClick="@AddFilesManually"/>
</MudBadge>
}
</MudPaper>
</MudLink>
<MudButton OnClick="@(async () => await this.ClearAllFiles())" Variant="Variant.Filled" Color="Color.Info" Class="mt-2" StartIcon="@Icons.Material.Filled.Delete">
@T("Clear file list")
</MudButton>
</div>
else
{
<MudTooltip Text="@T("Click to attach files")" Placement="@TOOLBAR_TOOLTIP_PLACEMENT">
<MudIconButton
Icon="@Icons.Material.Filled.AttachFile"
Color="Color.Default"
OnClick="@AddFilesManually"/>
</MudTooltip>
}
</div>
}
else
{
<div @onmouseenter="@this.OnMouseEnter" @onmouseleave="@this.OnMouseLeave">
<MudLink OnClick="@(() => this.AddFilesManually())" Style="text-decoration: none;">
<MudPaper Height="20em" Outlined="true" Class="@this.dragClass" Style="overflow-y: auto;">
<MudText Typo="Typo.h6">
@T("Drag and drop files here or click to attach documents.")
</MudText>
@foreach (var fileInfo in this.DocumentPaths.Select(file => new FileInfo(file)))
{
<MudBadge Origin="Origin.TopCenter" Icon="@Icons.Material.Filled.Search" Color="Color.Primary" Overlap="true" Bordered="true" OnClick="@(() => this.InvestigateFile(@fileInfo))">
<MudChip T="string" Color="Color.Dark" Text="@fileInfo.Name" tabindex="-1" Icon="@Icons.Material.Filled.Search" OnClick="@(() => this.InvestigateFile(@fileInfo))" OnClose="@(() => this.RemoveDocumentPathFromDocumentPaths(@fileInfo))"/>
</MudBadge>
}
</MudPaper>
</MudLink>
<MudButton OnClick="@(async () => await this.ClearAllFiles())" Variant="Variant.Filled" Color="Color.Info" Class="mt-2" StartIcon="@Icons.Material.Filled.Delete">
@T("Clear file list")
</MudButton>
</div>
}
@@ -28,6 +28,9 @@ public partial class AttachDocuments : MSGComponentBase
[Parameter]
public bool CatchAllDocuments { get; set; }

[Parameter]
public bool UseSmallForm { get; set; }

[Inject]
private ILogger<AttachDocuments> Logger { get; set; } = null!;

@@ -36,6 +39,7 @@ public partial class AttachDocuments : MSGComponentBase

[Inject]
private IDialogService DialogService { get; init; } = null!;
private const Placement TOOLBAR_TOOLTIP_PLACEMENT = Placement.Top;

#region Overrides of MSGComponentBase
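For context, the two call sites elsewhere in this commit show what the new UseSmallForm parameter toggles: the chat toolbar requests the compact icon-with-badge form, while the document analysis assistant keeps the full drop area (both lines quoted from the hunks in this diff):

<AttachDocuments Name="Document Files Drop" @bind-DocumentPaths="@this.chatDocumentPaths" CatchAllDocuments="true" UseSmallForm="true"/>
<AttachDocuments Name="Document Analysis Files Drop" @bind-DocumentPaths="@this.loadedDocumentPaths" CatchAllDocuments="true" UseSmallForm="false"/>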
@@ -83,6 +83,8 @@

<ChatTemplateSelection CanChatThreadBeUsedForTemplate="@this.CanThreadBeSaved" CurrentChatThread="@this.ChatThread" CurrentChatTemplate="@this.currentChatTemplate" CurrentChatTemplateChanged="@this.ChatTemplateWasChanged"/>

<AttachDocuments Name="Document Files Drop" @bind-DocumentPaths="@this.chatDocumentPaths" CatchAllDocuments="true" UseSmallForm="true"/>

@if (this.SettingsManager.ConfigurationData.Workspace.StorageBehavior is WorkspaceStorageBehavior.STORE_CHATS_AUTOMATICALLY)
{
<MudTooltip Text="@T("Delete this chat & start a new one.")" Placement="@TOOLBAR_TOOLTIP_PLACEMENT">

@@ -57,6 +57,8 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
private string currentWorkspaceName = string.Empty;
private Guid currentWorkspaceId = Guid.Empty;
private CancellationTokenSource? cancellationTokenSource;
private List<string> fileAttachments = new();
private HashSet<string> chatDocumentPaths = [];

// Unfortunately, we need the input field reference to blur the focus away. Without
// this, we cannot clear the input field.
@@ -462,6 +464,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
lastUserPrompt = new ContentText
{
Text = this.userInput,
FileAttachments = this.fileAttachments.ToList(), // Create a copy
};

//

@@ -507,6 +510,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
// Clear the input field:
await this.inputField.FocusAsync();
this.userInput = string.Empty;
this.fileAttachments.Clear();
await this.inputField.BlurAsync();

// Enable the stream state for the chat component:

@@ -575,6 +579,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
this.hasUnsavedChanges = false;
}

private async Task StartNewChat(bool useSameWorkspace = false, bool deletePreviousChat = false)
{
//
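Taken together, the chat-side changes work like this (sketch only; the method name is hypothetical, the field and property names are the ones from the hunks above): the pending attachment paths are copied into the ContentText of the outgoing user prompt, and the list is cleared afterwards, so every send carries its own snapshot.

// Condensed sketch of the send path inside ChatComponent:
private async Task SendUserInputSketch()
{
    var lastUserPrompt = new ContentText
    {
        Text = this.userInput,
        FileAttachments = this.fileAttachments.ToList(), // snapshot, as in the hunk above
    };

    // ... hand lastUserPrompt and the chat thread to the selected provider ...

    // Reset the input state for the next message:
    this.userInput = string.Empty;
    this.fileAttachments.Clear();
}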
@@ -23,7 +23,7 @@ public abstract class SettingsDialogBase : MSGComponentBase
protected readonly List<ConfigurationSelectData<string>> availableEmbeddingProviders = new();

#region Overrides of ComponentBase

/// <inheritdoc />
protected override async Task OnInitializedAsync()
{

@@ -13,7 +13,7 @@ public partial class SettingsDialogChatTemplate : SettingsDialogBase
public ChatThread? ExistingChatThread { get; set; }

#region Overrides of ComponentBase

/// <inheritdoc />
protected override async Task OnInitializedAsync()
{
@@ -39,6 +39,26 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the AlibabaCloud HTTP chat request:
var alibabaCloudChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -47,24 +67,8 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
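The same refactoring repeats in each provider hunk below: the inline LINQ projection over chatThread.Blocks is replaced by the new BuildMessages extension, and the message content is taken from PrepareContentForAI() so that file attachments reach the model. The shared shape, as a sketch (Message stands for the respective provider's request DTO; a few providers use a different type name or role mapping):

// Common pattern across the provider classes in this commit:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
    // Map AI Studio chat roles onto the wire-format roles:
    Role = n.Role switch
    {
        ChatRole.USER => "user",
        ChatRole.AI => "assistant",
        ChatRole.AGENT => "assistant",
        ChatRole.SYSTEM => "system",
        _ => "user",
    },

    // Expand text content, including any attached files, before sending:
    Content = n.Content switch
    {
        ContentText text => await text.PrepareContentForAI(),
        _ => string.Empty,
    }
});

// The request body then splices these in after the system prompt:
// Messages = [systemPrompt, ..messages]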
@@ -30,29 +30,32 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters("system");

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the Anthropic HTTP chat request:
var chatRequest = JsonSerializer.Serialize(new ChatRequest
{
Model = chatModel.Id,

// Build the messages:
Messages = [..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [..messages],

System = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
MaxTokens = apiParameters.TryGetValue("max_tokens", out var value) && value is int intValue ? intValue : 4_096,
@@ -39,6 +39,26 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the DeepSeek HTTP chat request:
var deepSeekChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -47,24 +67,8 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
@@ -39,6 +39,26 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the Fireworks HTTP chat request:
var fireworksChatRequest = JsonSerializer.Serialize(new ChatRequest
{

@@ -47,24 +67,7 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,
@@ -39,6 +39,26 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the GWDG HTTP chat request:
var gwdgChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -47,24 +67,8 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
@@ -39,6 +39,26 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the Google HTTP chat request:
var geminiChatRequest = JsonSerializer.Serialize(new ChatRequest
{

@@ -47,24 +67,7 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,
@@ -39,6 +39,26 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the OpenAI HTTP chat request:
var groqChatRequest = JsonSerializer.Serialize(new ChatRequest
{

@@ -47,24 +67,7 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,
@@ -38,6 +38,26 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl

// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the Helmholtz HTTP chat request:
var helmholtzChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest

@@ -47,24 +67,8 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
@@ -44,6 +44,26 @@ public sealed class ProviderHuggingFace : BaseProvider
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var message = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the HuggingFace HTTP chat request:
var huggingfaceChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -52,24 +72,8 @@ public sealed class ProviderHuggingFace : BaseProvider
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..message],

Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
@@ -36,6 +36,26 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/

// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new RegularMessage
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the Mistral HTTP chat request:
var mistralChatRequest = JsonSerializer.Serialize(new ChatRequest

@@ -45,24 +65,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new RegularMessage
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,

@@ -70,6 +73,7 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);

async Task<HttpRequestMessage> RequestBuilder()
{
// Build the HTTP post request:
@@ -88,6 +88,26 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"

// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters("input", "store", "tools");

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => systemPromptRole,

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

//
// Create the request: either for the Responses API or the Chat Completion API

@@ -102,24 +122,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => systemPromptRole,

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,
@@ -48,6 +48,26 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the Perplexity HTTP chat request:
var perplexityChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -56,24 +76,7 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
@@ -35,6 +35,26 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the OpenAI HTTP chat request:
var providerChatRequest = JsonSerializer.Serialize(new ChatRequest
{

@@ -43,24 +63,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,
@@ -39,6 +39,26 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();

// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message()
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});

// Prepare the xAI HTTP chat request:
var xChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{

@@ -47,24 +67,7 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
// Build the messages:
// - First of all the system prompt
// - Then none-empty user and AI messages
Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",

_ => "user",
},

Content = n.Content switch
{
ContentText text => text.Text,
_ => string.Empty,
}
}).ToList()],
Messages = [systemPrompt, ..messages],

// Right now, we only support streaming completions:
Stream = true,