Peer Schütt 2025-11-14 15:44:16 +01:00
commit b71e54b106
79 changed files with 1458 additions and 668 deletions

View File

@ -1474,6 +1474,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T4070211974"] = "Remove
-- No, keep it
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T4188329028"] = "No, keep it"
-- Export Chat to Microsoft Word
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T861873672"] = "Export Chat to Microsoft Word"
-- Open Settings
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ASSISTANTBLOCK::T1172211894"] = "Open Settings"
@ -1792,6 +1795,12 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::PROFILESELECTION::T918741365"] = "You can
-- Provider
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::PROVIDERSELECTION::T900237532"] = "Provider"
-- Pandoc Installation
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T185447014"] = "Pandoc Installation"
-- Pandoc may be required for importing files.
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T2596465560"] = "Pandoc may be required for importing files."
-- Videos are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T2928927510"] = "Videos are not supported yet"
@ -3106,6 +3115,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PANDOCDIALOG::T523908375"] = "Pandoc is dist
-- Tell the AI what you want it to do for you. What are your goals, or what are you trying to achieve? Like having the AI address you informally.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T1458195391"] = "Tell the AI what you want it to do for you. What are your goals, or what are you trying to achieve? Like having the AI address you informally."
-- Please be aware that your profile info becomes part of the system prompt. This means it uses up context space — the “memory” the LLM uses to understand and respond to your request. If your profile is extremely long, the LLM may struggle to focus on your actual task.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T1717545317"] = "Please be aware that your profile info becomes part of the system prompt. This means it uses up context space — the “memory” the LLM uses to understand and respond to your request. If your profile is extremely long, the LLM may struggle to focus on your actual task."
-- Update
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T1847791252"] = "Update"
@ -3118,18 +3130,12 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2261456575"] = "What should
-- Please enter a profile name.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2386844536"] = "Please enter a profile name."
-- The text must not exceed 256 characters.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2560188276"] = "The text must not exceed 256 characters."
-- Add
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2646845972"] = "Add"
-- The profile name must not exceed 40 characters.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T3243902394"] = "The profile name must not exceed 40 characters."
-- The text must not exceed 444 characters.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T3253349421"] = "The text must not exceed 444 characters."
-- Profile Name
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T3392578705"] = "Profile Name"
@ -3154,9 +3160,15 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T900713019"] = "Cancel"
-- The profile name must be unique; the chosen name is already in use.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T911748898"] = "The profile name must be unique; the chosen name is already in use."
-- Please be aware: This section is for experts only. You are responsible for verifying the correctness of the additional parameters you provide to the API call. By default, AI Studio uses the OpenAI-compatible chat completions API, when it is supported by the underlying service and model.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1017509792"] = "Please be aware: This section is for experts only. You are responsible for verifying the correctness of the additional parameters you provide to the API call. By default, AI Studio uses the OpenAI-compatible chat completions API, when it is supported by the underlying service and model."
-- Hugging Face Inference Provider
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1085481431"] = "Hugging Face Inference Provider"
-- Hide Expert Settings
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1108876344"] = "Hide Expert Settings"
-- Failed to store the API key in the operating system. The message was: {0}. Please try again.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1122745046"] = "Failed to store the API key in the operating system. The message was: {0}. Please try again."
@ -3169,6 +3181,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1356621346"] = "Create acco
-- Load models
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T15352225"] = "Load models"
-- Add the parameters in proper JSON formatting, e.g., "temperature": 0.5. Remove trailing commas. The usual surrounding curly brackets {} must not be used, though.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1689135032"] = "Add the parameters in proper JSON formatting, e.g., \"temperature\": 0.5. Remove trailing commas. The usual surrounding curly brackets {} must not be used, though."
-- Hostname
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1727440780"] = "Hostname"
@ -3190,12 +3205,18 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2331453405"] = "(Optional)
-- Add
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2646845972"] = "Add"
-- Additional API parameters
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2728244552"] = "Additional API parameters"
-- No models loaded or available.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2810182573"] = "No models loaded or available."
-- Instance Name
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2842060373"] = "Instance Name"
-- Show Expert Settings
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T3361153305"] = "Show Expert Settings"
-- Show available models
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T3763891899"] = "Show available models"
@ -4033,6 +4054,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T380451542
-- Actions
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T3865031940"] = "Actions"
-- This profile is managed by your organization.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T4058414654"] = "This profile is managed by your organization."
-- Store personal data about yourself in various profiles so that the AIs know your personal context. This saves you from having to explain your context each time, for example, in every chat. When you have different roles, you can create a profile for each role.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T4125557797"] = "Store personal data about yourself in various profiles so that the AIs know your personal context. This saves you from having to explain your context each time, for example, in every chat. When you have different roles, you can create a profile for each role."
@ -4972,8 +4996,8 @@ UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T1674355816"] = "Tried to com
-- Tried to stream the LLM provider '{0}' answer. Was not able to read the stream. The message is: '{1}'
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T1856278860"] = "Tried to stream the LLM provider '{0}' answer. Was not able to read the stream. The message is: '{1}'"
-- Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T2249520705"] = "Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'"
-- Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T2181034173"] = "Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'."
-- Tried to communicate with the LLM provider '{0}'. Something was not found. The provider message is: '{1}'
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T2780552614"] = "Tried to communicate with the LLM provider '{0}'. Something was not found. The provider message is: '{1}'"
@ -5521,6 +5545,18 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T567205144"] = "It seems that Pandoc i
-- The latest Pandoc version was not found, installing version {0} instead.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T726914939"] = "The latest Pandoc version was not found, installing version {0} instead."
-- Pandoc is required for Microsoft Word export.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T1473115556"] = "Pandoc is required for Microsoft Word export."
-- Pandoc Installation
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T185447014"] = "Pandoc Installation"
-- Error during Microsoft Word export
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T3290596792"] = "Error during Microsoft Word export"
-- Microsoft Word export successful
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T4256043333"] = "Microsoft Word export successful"
-- The table AUTHORS does not exist or is using an invalid syntax.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PLUGINSYSTEM::PLUGINBASE::T1068328139"] = "The table AUTHORS does not exist or is using an invalid syntax."

View File

@ -85,7 +85,7 @@ public partial class AssistantMyTasks : AssistantBaseCore<SettingsDialogMyTasks>
private string? ValidateProfile(Profile profile)
{
if(profile == default || profile == Profile.NO_PROFILE)
if(profile == Profile.NO_PROFILE)
return T("Please select one of your profiles.");
return null;

View File

@ -2,6 +2,7 @@
@using MudBlazor
@using AIStudio.Components
@inherits AIStudio.Components.MSGComponentBase
<MudCard Class="@this.CardClasses" Outlined="@true">
<MudCardHeader>
<CardHeaderAvatar>
@ -47,6 +48,13 @@
<MudIconButton Icon="@Icons.Material.Filled.Delete" Color="Color.Error" OnClick="@this.RemoveBlock"/>
</MudTooltip>
}
@if (this.Role is ChatRole.AI)
{
<MudTooltip Text="@T("Export Chat to Microsoft Word")" Placement="Placement.Bottom">
<MudIconButton Icon="@Icons.Material.Filled.Save" OnClick="@this.ExportToWord"/>
</MudTooltip>
}
<MudCopyClipboardButton Content="@this.Content" Type="@this.Type" Size="Size.Medium"/>
</CardHeaderActions>
</MudCardHeader>

View File

@ -1,5 +1,5 @@
using AIStudio.Components;
using AIStudio.Tools.Services;
using Microsoft.AspNetCore.Components;
namespace AIStudio.Chat;
@ -63,6 +63,9 @@ public partial class ContentBlockComponent : MSGComponentBase
[Inject]
private IDialogService DialogService { get; init; } = null!;
[Inject]
private RustService RustService { get; init; } = null!;
private bool HideContent { get; set; }
#region Overrides of ComponentBase
@ -133,6 +136,11 @@ public partial class ContentBlockComponent : MSGComponentBase
await this.RemoveBlockFunc(this.Content);
}
private async Task ExportToWord()
{
await PandocExport.ToMicrosoftWord(this.RustService, this.DialogService, T("Export Chat to Microsoft Word"), this.Content);
}
private async Task RegenerateBlock()
{
if (this.RegenerateFunc is null)
@ -179,4 +187,5 @@ public partial class ContentBlockComponent : MSGComponentBase
if (edit.HasValue && edit.Value)
await this.EditLastUserBlockFunc(this.Content);
}
}

View File

@ -13,6 +13,8 @@ public partial class Changelog
public static readonly Log[] LOGS =
[
new (228, "v0.9.53, build 228 (2025-11-14 13:14 UTC)", "v0.9.53.md"),
new (227, "v0.9.52, build 227 (2025-10-24 06:00 UTC)", "v0.9.52.md"),
new (226, "v0.9.51, build 226 (2025-09-04 18:02 UTC)", "v0.9.51.md"),
new (225, "v0.9.50, build 225 (2025-08-10 16:40 UTC)", "v0.9.50.md"),
new (224, "v0.9.49, build 224 (2025-07-02 12:12 UTC)", "v0.9.49.md"),

View File

@ -801,11 +801,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
// Try to select the profile:
if (!string.IsNullOrWhiteSpace(chatProfile))
{
this.currentProfile = this.SettingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == chatProfile);
if(this.currentProfile == default)
this.currentProfile = Profile.NO_PROFILE;
}
this.currentProfile = this.SettingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == chatProfile) ?? Profile.NO_PROFILE;
// Try to select the chat template:
if (!string.IsNullOrWhiteSpace(chatChatTemplate))
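
The refactor above folds the two-step lookup-and-fallback into a single expression. A minimal, self-contained sketch of the pattern, assuming Profile is a reference type (so FirstOrDefault yields null when no ID matches); the record below is a hypothetical stand-in for the app's real type:

using System;
using System.Linq;

var profiles = new[] { new Profile("a1", "Work"), new Profile("b2", "Private") };
var chatProfile = "b2";

// Take the first profile with a matching ID, or fall back to NO_PROFILE
// when FirstOrDefault returns null because no profile matches:
var selected = profiles.FirstOrDefault(x => x.Id == chatProfile) ?? Profile.NO_PROFILE;
Console.WriteLine(selected.Name); // Private

public sealed record Profile(string Id, string Name)
{
    public static readonly Profile NO_PROFILE = new(string.Empty, "No profile");
}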

View File

@ -19,7 +19,7 @@
<MudDivider/>
@foreach (var profile in this.SettingsManager.ConfigurationData.Profiles.GetAllProfiles())
{
<MudMenuItem Icon="@Icons.Material.Filled.Person4" OnClick="() => this.SelectionChanged(profile)">
<MudMenuItem Icon="@this.ProfileIcon(profile)" OnClick="() => this.SelectionChanged(profile)">
@profile.Name
</MudMenuItem>
}

View File

@ -38,6 +38,14 @@ public partial class ProfileSelection : MSGComponentBase
private string MarginClass => $"{this.MarginLeft} {this.MarginRight}";
private string ProfileIcon(Profile profile)
{
if (profile.IsEnterpriseConfiguration)
return Icons.Material.Filled.Business;
return Icons.Material.Filled.Person4;
}
private async Task SelectionChanged(Profile profile)
{
this.CurrentProfile = profile;

View File

@ -1,8 +1,11 @@
using AIStudio.Dialogs;
using AIStudio.Tools.Rust;
using AIStudio.Tools.Services;
using Microsoft.AspNetCore.Components;
using DialogOptions = AIStudio.Dialogs.DialogOptions;
namespace AIStudio.Components;
public partial class ReadFileContent : MSGComponentBase
@ -19,6 +22,12 @@ public partial class ReadFileContent : MSGComponentBase
[Inject]
private RustService RustService { get; init; } = null!;
[Inject]
private IDialogService DialogService { get; init; } = null!;
[Inject]
private ILogger<ReadFileContent> Logger { get; init; } = null!;
private async Task SelectFile()
{
var selectedFile = await this.RustService.SelectFile(T("Select file to read its content"));
@ -47,6 +56,26 @@ public partial class ReadFileContent : MSGComponentBase
return;
}
// Ensure that Pandoc is installed and ready:
var pandocState = await Pandoc.CheckAvailabilityAsync(this.RustService, showSuccessMessage: false);
if (!pandocState.IsAvailable)
{
var dialogParameters = new DialogParameters<PandocDialog>
{
{ x => x.ShowInitialResultInSnackbar, false },
};
var dialogReference = await this.DialogService.ShowAsync<PandocDialog>(T("Pandoc Installation"), dialogParameters, DialogOptions.FULLSCREEN);
await dialogReference.Result;
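// The dialog may have installed Pandoc; check availability again, showing a success message this time: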
pandocState = await Pandoc.CheckAvailabilityAsync(this.RustService, showSuccessMessage: true);
if (!pandocState.IsAvailable)
{
this.Logger.LogError("Pandoc is not available after installation attempt.");
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Cancel, T("Pandoc may be required for importing files.")));
}
}
var fileContent = await this.RustService.ReadArbitraryFileData(selectedFile.SelectedFilePath, int.MaxValue);
await this.FileContentChanged.InvokeAsync(fileContent);
}

View File

@ -28,5 +28,5 @@
}
<ConfigurationProviderSelection Component="Components.APP_SETTINGS" Data="@this.AvailableLLMProvidersFunc()" SelectedValue="@(() => this.SettingsManager.ConfigurationData.App.PreselectedProvider)" SelectionUpdate="@(selectedValue => this.SettingsManager.ConfigurationData.App.PreselectedProvider = selectedValue)" HelpText="@(() => T("Would you like to set one provider as the default for the entire app? When you configure a different provider for an assistant, it will always take precedence."))"/>
<ConfigurationSelect OptionDescription="@T("Preselect one of your profiles?")" SelectedValue="@(() => this.SettingsManager.ConfigurationData.App.PreselectedProfile)" Data="@ConfigurationSelectDataFactory.GetProfilesData(this.SettingsManager.ConfigurationData.Profiles)" SelectionUpdate="@(selectedValue => this.SettingsManager.ConfigurationData.App.PreselectedProfile = selectedValue)" OptionHelp="@T("Would you like to set one of your profiles as the default for the entire app? When you configure a different profile for an assistant, it will always take precedence.")"/>
<ConfigurationSelect OptionDescription="@T("Preselect one of your profiles?")" SelectedValue="@(() => this.SettingsManager.ConfigurationData.App.PreselectedProfile)" Data="@ConfigurationSelectDataFactory.GetProfilesData(this.SettingsManager.ConfigurationData.Profiles)" SelectionUpdate="@(selectedValue => this.SettingsManager.ConfigurationData.App.PreselectedProfile = selectedValue)" OptionHelp="@T("Would you like to set one of your profiles as the default for the entire app? When you configure a different profile for an assistant, it will always take precedence.")" IsLocked="() => ManagedConfiguration.TryGet(x => x.App, x => x.PreselectedProfile, out var meta) && meta.IsLocked"/>
</ExpansionPanel>

View File

@ -72,6 +72,7 @@ public partial class SettingsPanelProviders : SettingsPanelBase
{ x => x.IsEditing, true },
{ x => x.DataHost, provider.Host },
{ x => x.HFInferenceProviderId, provider.HFInferenceProvider },
{ x => x.AdditionalJsonApiParameters, provider.AdditionalJsonApiParameters },
};
var dialogReference = await this.DialogService.ShowAsync<ProviderDialog>(T("Edit LLM Provider"), dialogParameters, DialogOptions.FULLSCREEN);

View File

@ -42,12 +42,10 @@
Lines="6"
AutoGrow="@true"
MaxLines="12"
MaxLength="444"
Counter="444"
Class="mb-3"
UserAttributes="@SPELLCHECK_ATTRIBUTES"
HelperText="@T("Tell the AI something about yourself. What is your profession? How experienced are you in this profession? Which technologies do you like?")"
/>
<ReadFileContent @bind-FileContent="@this.DataNeedToKnow"/>
<MudTextField
T="string"
@ -61,13 +59,16 @@
Lines="6"
AutoGrow="@true"
MaxLines="12"
MaxLength="256"
Counter="256"
Class="mb-3"
Class="mt-10"
UserAttributes="@SPELLCHECK_ATTRIBUTES"
HelperText="@T("Tell the AI what you want it to do for you. What are your goals or are you trying to achieve? Like having the AI address you informally.")"
/>
<ReadFileContent @bind-FileContent="@this.DataActions"/>
<MudJustifiedText Typo="Typo.body2" Class="mb-3 mt-3">
@T("Please be aware that your profile info becomes part of the system prompt. This means it uses up context space — the “memory” the LLM uses to understand and respond to your request. If your profile is extremely long, the LLM may struggle to focus on your actual task.")
</MudJustifiedText>
</MudForm>
<Issues IssuesData="@this.dataIssues"/>
</DialogContent>

View File

@ -67,10 +67,12 @@ public partial class ProfileDialog : MSGComponentBase
{
Num = this.DataNum,
Id = this.DataId,
Name = this.DataName,
NeedToKnow = this.DataNeedToKnow,
Actions = this.DataActions,
EnterpriseConfigurationPluginId = Guid.Empty,
IsEnterpriseConfiguration = false,
};
#region Overrides of ComponentBase
@ -129,9 +131,6 @@ public partial class ProfileDialog : MSGComponentBase
if (string.IsNullOrWhiteSpace(this.DataNeedToKnow) && string.IsNullOrWhiteSpace(this.DataActions))
return T("Please enter what the LLM should know about you and/or what actions it should take.");
if(text.Length > 444)
return T("The text must not exceed 444 characters.");
return null;
}
@ -140,9 +139,6 @@ public partial class ProfileDialog : MSGComponentBase
if (string.IsNullOrWhiteSpace(this.DataNeedToKnow) && string.IsNullOrWhiteSpace(this.DataActions))
return T("Please enter what the LLM should know about you and/or what actions it should take.");
if(text.Length > 256)
return T("The text must not exceed 256 characters.");
return null;
}

View File

@ -130,6 +130,18 @@
UserAttributes="@SPELLCHECK_ATTRIBUTES"
/>
<MudStack>
<MudButton OnClick="@this.ToggleExpertSettings">
@(this.showExpertSettings ? T("Hide Expert Settings") : T("Show Expert Settings"))
</MudButton>
<MudDivider />
<MudCollapse Expanded="@this.showExpertSettings" Class="@this.GetExpertStyles">
<MudJustifiedText Class="mb-5">
@T("Please be aware: This section is for experts only. You are responsible for verifying the correctness of the additional parameters you provide to the API call. By default, AI Studio uses the OpenAI-compatible chat completions API, when that it is supported by the underlying service and model.")
</MudJustifiedText>
<MudTextField T="string" Label=@T("Additional API parameters") Variant="Variant.Outlined" Lines="4" AutoGrow="true" MaxLines="10" HelperText=@T("""Add the parameters in proper JSON formatting, e.g., "temperature": 0.5. Remove trailing commas. The usual surrounding curly brackets {} must not be used, though.""") Placeholder="@GetPlaceholderExpertSettings" @bind-Value="@this.AdditionalJsonApiParameters" OnBlur="@this.OnInputChangeExpertSettings"/>
</MudCollapse>
</MudStack>
</MudForm>
<Issues IssuesData="@this.dataIssues"/>
</DialogContent>

View File

@ -78,6 +78,9 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
[Parameter]
public bool IsEditing { get; init; }
[Parameter]
public string AdditionalJsonApiParameters { get; set; } = string.Empty;
[Inject]
private RustService RustService { get; init; } = null!;
@ -94,6 +97,7 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
private string dataManuallyModel = string.Empty;
private string dataAPIKeyStorageIssue = string.Empty;
private string dataEditingPreviousInstanceName = string.Empty;
private bool showExpertSettings;
// We get the form reference from Blazor code to validate it manually:
private MudForm form = null!;
@ -135,6 +139,7 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
Hostname = cleanedHostname.EndsWith('/') ? cleanedHostname[..^1] : cleanedHostname,
Host = this.DataHost,
HFInferenceProvider = this.HFInferenceProviderId,
AdditionalJsonApiParameters = this.AdditionalJsonApiParameters,
};
}
@ -149,6 +154,8 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
#pragma warning disable MWAIS0001
this.UsedInstanceNames = this.SettingsManager.ConfigurationData.Providers.Select(x => x.InstanceName.ToLowerInvariant()).ToList();
#pragma warning restore MWAIS0001
this.showExpertSettings = !string.IsNullOrWhiteSpace(this.AdditionalJsonApiParameters);
// When editing, we need to load the data:
if(this.IsEditing)
@ -268,4 +275,20 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
LLMProviders.SELF_HOSTED => T("(Optional) API Key"),
_ => T("API Key"),
};
private void ToggleExpertSettings() => this.showExpertSettings = !this.showExpertSettings;
private void OnInputChangeExpertSettings()
{
this.AdditionalJsonApiParameters = this.AdditionalJsonApiParameters.Trim().TrimEnd(',', ' ');
}
private string GetExpertStyles => this.showExpertSettings ? "border-2 border-dashed rounded pa-2" : string.Empty;
private static string GetPlaceholderExpertSettings =>
"""
"temperature": 0.5,
"top_p": 0.9,
"frequency_penalty": 0.0
""";
}
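
The expert settings accept a brace-less JSON fragment; on the provider side, ParseAdditionalApiParameters (shown in BaseProvider further below) wraps that fragment in curly brackets before deserializing it. A minimal sketch of the round trip, assuming plain System.Text.Json:

using System;
using System.Text.Json;

// User input from the expert settings field: no surrounding braces, no trailing comma.
var fragment = "\"temperature\": 0.5, \"top_p\": 0.9";

// Wrapping the fragment in curly brackets turns it into a valid JSON object:
var json = $"{{{fragment}}}";
var element = JsonSerializer.Deserialize<JsonElement>(json);
Console.WriteLine(element.GetProperty("temperature").GetDouble()); // 0.5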

View File

@ -30,14 +30,23 @@
<MudTd>@context.Num</MudTd>
<MudTd>@context.Name</MudTd>
<MudTd>
<MudStack Row="true" Class="mb-2 mt-2" Wrap="Wrap.Wrap">
<MudTooltip Text="@T("Edit")">
<MudIconButton Color="Color.Info" Icon="@Icons.Material.Filled.Edit" OnClick="() => this.EditProfile(context)"/>
@if (context.IsEnterpriseConfiguration)
{
<MudTooltip Text="@T("This profile is managed by your organization.")">
<MudIconButton Color="Color.Info" Icon="@Icons.Material.Filled.Business" Disabled="true"/>
</MudTooltip>
<MudTooltip Text="@T("Delete")">
<MudIconButton Color="Color.Error" Icon="@Icons.Material.Filled.Delete" OnClick="() => this.DeleteProfile(context)"/>
</MudTooltip>
</MudStack>
}
else
{
<MudStack Row="true" Class="mb-2 mt-2" Wrap="Wrap.Wrap">
<MudTooltip Text="@T("Edit")">
<MudIconButton Color="Color.Info" Icon="@Icons.Material.Filled.Edit" OnClick="() => this.EditProfile(context)"/>
</MudTooltip>
<MudTooltip Text="@T("Delete")">
<MudIconButton Color="Color.Error" Icon="@Icons.Material.Filled.Delete" OnClick="() => this.DeleteProfile(context)"/>
</MudTooltip>
</MudStack>
}
</MudTd>
</RowTemplate>
</MudTable>

View File

@ -47,12 +47,12 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="CodeBeam.MudBlazor.Extensions" Version="8.2.4" />
<PackageReference Include="HtmlAgilityPack" Version="1.12.2" />
<PackageReference Include="Microsoft.Extensions.FileProviders.Embedded" Version="9.0.8" />
<PackageReference Include="CodeBeam.MudBlazor.Extensions" Version="8.2.5" />
<PackageReference Include="HtmlAgilityPack" Version="1.12.4" />
<PackageReference Include="Microsoft.Extensions.FileProviders.Embedded" Version="9.0.11" />
<PackageReference Include="MudBlazor" Version="8.12.0" />
<PackageReference Include="MudBlazor.Markdown" Version="8.11.0" />
<PackageReference Include="ReverseMarkdown" Version="4.7.0" />
<PackageReference Include="ReverseMarkdown" Version="4.7.1" />
<PackageReference Include="LuaCSharp" Version="0.4.2" />
</ItemGroup>

View File

@ -47,15 +47,24 @@ DEPRECATION_MESSAGE = ""
CONFIG = {}
CONFIG["LLM_PROVIDERS"] = {}
-- An example of a configuration for a self-hosted ollama server:
-- An example of a configuration for a self-hosted server:
CONFIG["LLM_PROVIDERS"][#CONFIG["LLM_PROVIDERS"]+1] = {
["Id"] = "00000000-0000-0000-0000-000000000000",
["InstanceName"] = "<user-friendly name for the combination of server and model>",
["UsedLLMProvider"] = "SELF_HOSTED",
-- Allowed values for Host are: LM_STUDIO, LLAMACPP, OLLAMA, and VLLM
["Host"] = "OLLAMA",
["Hostname"] = "<https address of the ollama server>",
["Hostname"] = "<https address of the server>",
-- Optional: Additional parameters for the API.
-- Please refer to the documentation of the selected host for details.
-- Might be something like ... \"temperature\": 0.5 ... for one parameter.
-- Could be something like ... \"temperature\": 0.5, \"max_tokens\": 1000 ... for multiple parameters.
-- Please do not add the enclosing curly braces {} here. Also, no trailing comma is allowed.
["AdditionalJsonApiParameters"] = "",
["Model"] = {
["Id"] = "<the ollama model ID>",
["Id"] = "<the model ID>",
["DisplayName"] = "<user-friendly name of the model>",
}
}
@ -86,6 +95,13 @@ CONFIG["SETTINGS"] = {}
-- Examples are PRE_WRITER_MODE_2024, PRE_RAG_2024, PRE_DOCUMENT_ANALYSIS_2025.
-- CONFIG["SETTINGS"]["DataApp.EnabledPreviewFeatures"] = { "PRE_RAG_2024", "PRE_DOCUMENT_ANALYSIS_2025" }
-- Configure the preselected profile.
-- It must be one of the profile IDs defined in CONFIG["PROFILES"].
-- Be aware that the ID must use the same casing as defined in the profile.
-- If the profile ID uses uppercase letters but this setting uses lowercase
-- letters, the preselection will not work.
-- CONFIG["SETTINGS"]["DataApp.PreselectedProfile"] = "00000000-0000-0000-0000-000000000000"
-- Example chat templates for this configuration:
CONFIG["CHAT_TEMPLATES"] = {}
@ -109,3 +125,14 @@ CONFIG["CHAT_TEMPLATES"][#CONFIG["CHAT_TEMPLATES"]+1] = {
}
}
}
-- Profiles for this configuration:
CONFIG["PROFILES"] = {}
-- A simple profile template:
CONFIG["PROFILES"][#CONFIG["PROFILES"]+1] = {
["Id"] = "00000000-0000-0000-0000-000000000000",
["Name"] = "<user-friendly name of the profile>",
["NeedToKnow"] = "I like to cook in my free time. My favorite meal is ...",
["Actions"] = "Please always ensure the portion size is ..."
}

View File

@ -1476,6 +1476,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T4070211974"] = "Nachric
-- No, keep it
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T4188329028"] = "Nein, behalten"
-- Export Chat to Microsoft Word
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T861873672"] = "Chat in Microsoft Word exportieren"
-- Open Settings
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ASSISTANTBLOCK::T1172211894"] = "Einstellungen öffnen"
@ -1797,6 +1800,12 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::PROVIDERSELECTION::T900237532"] = "Anbiet
-- Videos are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T2928927510"] = "Videos werden noch nicht unterstützt."
-- Pandoc Installation
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T185447014"] = "Pandoc-Installation"
-- Pandoc may be required for importing files.
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T2596465560"] = "Pandoc wird möglicherweise zum Importieren von Dateien benötigt."
-- Images are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T298062956"] = "Bilder werden derzeit nicht unterstützt"
@ -3108,6 +3117,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PANDOCDIALOG::T523908375"] = "Pandoc wird un
-- Tell the AI what you want it to do for you. What are your goals, or what are you trying to achieve? Like having the AI address you informally.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T1458195391"] = "Teilen Sie der KI mit, was sie machen soll. Was sind Ihre Ziele oder was möchten Sie erreichen? Zum Beispiel, dass die KI Sie duzt."
-- Please be aware that your profile info becomes part of the system prompt. This means it uses up context space — the “memory” the LLM uses to understand and respond to your request. If your profile is extremely long, the LLM may struggle to focus on your actual task.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T1717545317"] = "Bitte beachten Sie, dass Ihre Profilinformationen Teil des System-Prompts werden. Das bedeutet, sie belegen einen Teil des Kontexts den „Speicher“, den das LLM nutzt, um Ihre Anfrage zu verstehen und darauf zu antworten. Wenn Ihr Profil extrem lang ist, kann das LLM Schwierigkeiten haben, die Aufgabe auszuführen."
-- Update
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T1847791252"] = "Aktualisieren"
@ -3120,18 +3132,12 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2261456575"] = "Was soll die
-- Please enter a profile name.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2386844536"] = "Bitte geben Sie einen Profilnamen ein."
-- The text must not exceed 256 characters.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2560188276"] = "Der Text darf 256 Zeichen nicht überschreiten."
-- Add
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2646845972"] = "Hinzufügen"
-- The profile name must not exceed 40 characters.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T3243902394"] = "Der Profilname darf nicht länger als 40 Zeichen sein."
-- The text must not exceed 444 characters.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T3253349421"] = "Der Text darf 444 Zeichen nicht überschreiten."
-- Profile Name
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T3392578705"] = "Profilname"
@ -3156,9 +3162,15 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T900713019"] = "Abbrechen"
-- The profile name must be unique; the chosen name is already in use.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T911748898"] = "Der Profilname muss eindeutig sein; der ausgewählte Name wird bereits verwendet."
-- Please be aware: This section is for experts only. You are responsible for verifying the correctness of the additional parameters you provide to the API call. By default, AI Studio uses the OpenAI-compatible chat completions API, when it is supported by the underlying service and model.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1017509792"] = "Bitte beachten Sie: Dieser Bereich ist nur für Expertinnen und Experten. Sie sind dafür verantwortlich, die Korrektheit der zusätzlichen Parameter zu überprüfen, die Sie beim API-Aufruf angeben. Standardmäßig verwendet AI Studio die OpenAI-kompatible Chat-Completions-API, sofern diese vom zugrunde liegenden Dienst und Modell unterstützt wird."
-- Hugging Face Inference Provider
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1085481431"] = "Hugging Face Inferenz-Anbieter"
-- Hide Expert Settings
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1108876344"] = "Experten-Einstellungen ausblenden"
-- Failed to store the API key in the operating system. The message was: {0}. Please try again.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1122745046"] = "Der API-Schlüssel konnte nicht im Betriebssystem gespeichert werden. Die Meldung war: {0}. Bitte versuchen Sie es erneut."
@ -3171,6 +3183,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1356621346"] = "Konto erste
-- Load models
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T15352225"] = "Modelle laden"
-- Add the parameters in proper JSON formatting, e.g., "temperature": 0.5. Remove trailing commas. The usual surrounding curly brackets {} must not be used, though.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1689135032"] = "Fügen Sie die Parameter in korrekter JSON-Formatierung hinzu, z. B. \"temperature\": 0.5. Entfernen Sie abschließende Kommas. Die üblichen äußeren geschweiften Klammern {} dürfen dabei jedoch nicht verwendet werden."
-- Hostname
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1727440780"] = "Hostname"
@ -3192,12 +3207,18 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2331453405"] = "(Optional)
-- Add
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2646845972"] = "Hinzufügen"
-- Additional API parameters
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2728244552"] = "Zusätzliche API-Parameter"
-- No models loaded or available.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2810182573"] = "Keine Modelle geladen oder verfügbar."
-- Instance Name
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2842060373"] = "Instanzname"
-- Show Expert Settings
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T3361153305"] = "Experten-Einstellungen anzeigen"
-- Show available models
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T3763891899"] = "Verfügbare Modelle anzeigen"
@ -4035,6 +4056,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T380451542
-- Actions
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T3865031940"] = "Aktionen"
-- This profile is managed by your organization.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T4058414654"] = "Dieses Profil wird von Ihrer Organisation verwaltet."
-- Store personal data about yourself in various profiles so that the AIs know your personal context. This saves you from having to explain your context each time, for example, in every chat. When you have different roles, you can create a profile for each role.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T4125557797"] = "Speichern Sie persönliche Daten über sich in verschiedenen Profilen, damit die KIs ihren persönlichen Kontext kennen. So müssen Sie den Kontext nicht jedes Mal erneut erklären, zum Beispiel in jedem Chat. Wenn Sie verschiedene Rollen haben, können Sie für jede Rolle ein eigenes Profil anlegen."
@ -4974,8 +4998,8 @@ UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T1674355816"] = "Es wurde ver
-- Tried to stream the LLM provider '{0}' answer. Was not able to read the stream. The message is: '{1}'
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T1856278860"] = "Der Versuch, die Antwort des LLM-Anbieters '{0}' zu streamen, ist fehlgeschlagen. Der Stream konnte nicht gelesen werden. Die Meldung lautet: '{1}'"
-- Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T2249520705"] = "Es wurde versucht, mit dem LLM-Anbieter '{0}' zu kommunizieren. Auch nach {1} Versuchen gab es Probleme mit der Anfrage. Die Nachricht des Anbieters lautet: '{2}'"
-- Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T2181034173"] = "Versuchte, mit dem LLM-Anbieter '{0}' zu kommunizieren. Auch nach {1} Wiederholungsversuchen gab es Probleme mit der Anfrage. Die Meldung des Anbieters lautet: '{2}'."
-- Tried to communicate with the LLM provider '{0}'. Something was not found. The provider message is: '{1}'
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T2780552614"] = "Es wurde versucht, mit dem LLM-Anbieter '{0}' zu kommunizieren. Etwas wurde nicht gefunden. Die Meldung des Anbieters lautet: '{1}'"
@ -5523,6 +5547,18 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T567205144"] = "Es scheint, dass Pando
-- The latest Pandoc version was not found, installing version {0} instead.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T726914939"] = "Die neueste Pandoc-Version wurde nicht gefunden, stattdessen wird Version {0} installiert."
-- Pandoc is required for Microsoft Word export.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T1473115556"] = "Pandoc wird für den Export nach Microsoft Word benötigt."
-- Pandoc Installation
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T185447014"] = "Pandoc-Installation"
-- Error during Microsoft Word export
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T3290596792"] = "Fehler beim Exportieren nach Microsoft Word"
-- Microsoft Word export successful
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T4256043333"] = "Export nach Microsoft Word erfolgreich"
-- The table AUTHORS does not exist or is using an invalid syntax.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PLUGINSYSTEM::PLUGINBASE::T1068328139"] = "Die Tabelle AUTHORS existiert nicht oder verwendet eine ungültige Syntax."

View File

@ -1476,6 +1476,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T4070211974"] = "Remove
-- No, keep it
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T4188329028"] = "No, keep it"
-- Export Chat to Microsoft Word
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T861873672"] = "Export Chat to Microsoft Word"
-- Open Settings
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::ASSISTANTBLOCK::T1172211894"] = "Open Settings"
@ -1797,6 +1800,12 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::PROVIDERSELECTION::T900237532"] = "Provid
-- Videos are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T2928927510"] = "Videos are not supported yet"
-- Pandoc Installation
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T185447014"] = "Pandoc Installation"
-- Pandoc may be required for importing files.
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T2596465560"] = "Pandoc may be required for importing files."
-- Images are not supported yet
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::READFILECONTENT::T298062956"] = "Images are not supported yet"
@ -3108,6 +3117,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PANDOCDIALOG::T523908375"] = "Pandoc is dist
-- Tell the AI what you want it to do for you. What are your goals, or what are you trying to achieve? Like having the AI address you informally.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T1458195391"] = "Tell the AI what you want it to do for you. What are your goals, or what are you trying to achieve? Like having the AI address you informally."
-- Please be aware that your profile info becomes part of the system prompt. This means it uses up context space — the “memory” the LLM uses to understand and respond to your request. If your profile is extremely long, the LLM may struggle to focus on your actual task.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T1717545317"] = "Please be aware that your profile info becomes part of the system prompt. This means it uses up context space — the “memory” the LLM uses to understand and respond to your request. If your profile is extremely long, the LLM may struggle to focus on your actual task."
-- Update
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T1847791252"] = "Update"
@ -3120,18 +3132,12 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2261456575"] = "What should
-- Please enter a profile name.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2386844536"] = "Please enter a profile name."
-- The text must not exceed 256 characters.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2560188276"] = "The text must not exceed 256 characters."
-- Add
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T2646845972"] = "Add"
-- The profile name must not exceed 40 characters.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T3243902394"] = "The profile name must not exceed 40 characters."
-- The text must not exceed 444 characters.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T3253349421"] = "The text must not exceed 444 characters."
-- Profile Name
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T3392578705"] = "Profile Name"
@ -3156,9 +3162,15 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T900713019"] = "Cancel"
-- The profile name must be unique; the chosen name is already in use.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROFILEDIALOG::T911748898"] = "The profile name must be unique; the chosen name is already in use."
-- Please be aware: This section is for experts only. You are responsible for verifying the correctness of the additional parameters you provide to the API call. By default, AI Studio uses the OpenAI-compatible chat completions API, when it is supported by the underlying service and model.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1017509792"] = "Please be aware: This section is for experts only. You are responsible for verifying the correctness of the additional parameters you provide to the API call. By default, AI Studio uses the OpenAI-compatible chat completions API, when it is supported by the underlying service and model."
-- Hugging Face Inference Provider
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1085481431"] = "Hugging Face Inference Provider"
-- Hide Expert Settings
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1108876344"] = "Hide Expert Settings"
-- Failed to store the API key in the operating system. The message was: {0}. Please try again.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1122745046"] = "Failed to store the API key in the operating system. The message was: {0}. Please try again."
@ -3171,6 +3183,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1356621346"] = "Create acco
-- Load models
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T15352225"] = "Load models"
-- Add the parameters in proper JSON formatting, e.g., "temperature": 0.5. Remove trailing commas. The usual surrounding curly brackets {} must not be used, though.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1689135032"] = "Add the parameters in proper JSON formatting, e.g., \"temperature\": 0.5. Remove trailing commas. The usual surrounding curly brackets {} must not be used, though."
-- Hostname
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T1727440780"] = "Hostname"
@ -3192,12 +3207,18 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2331453405"] = "(Optional)
-- Add
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2646845972"] = "Add"
-- Additional API parameters
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2728244552"] = "Additional API parameters"
-- No models loaded or available.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2810182573"] = "No models loaded or available."
-- Instance Name
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T2842060373"] = "Instance Name"
-- Show Expert Settings
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T3361153305"] = "Show Expert Settings"
-- Show available models
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T3763891899"] = "Show available models"
@ -4035,6 +4056,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T380451542
-- Actions
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T3865031940"] = "Actions"
-- This profile is managed by your organization.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T4058414654"] = "This profile is managed by your organization."
-- Store personal data about yourself in various profiles so that the AIs know your personal context. This saves you from having to explain your context each time, for example, in every chat. When you have different roles, you can create a profile for each role.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::SETTINGS::SETTINGSDIALOGPROFILES::T4125557797"] = "Store personal data about yourself in various profiles so that the AIs know your personal context. This saves you from having to explain your context each time, for example, in every chat. When you have different roles, you can create a profile for each role."
@ -4974,8 +4998,8 @@ UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T1674355816"] = "Tried to com
-- Tried to stream the LLM provider '{0}' answer. Was not able to read the stream. The message is: '{1}'
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T1856278860"] = "Tried to stream the LLM provider '{0}' answer. Was not able to read the stream. The message is: '{1}'"
-- Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T2249520705"] = "Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'"
-- Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T2181034173"] = "Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'."
-- Tried to communicate with the LLM provider '{0}'. Something was not found. The provider message is: '{1}'
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::BASEPROVIDER::T2780552614"] = "Tried to communicate with the LLM provider '{0}'. Something was not found. The provider message is: '{1}'"
@ -5523,6 +5547,18 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T567205144"] = "It seems that Pandoc i
-- The latest Pandoc version was not found, installing version {0} instead.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T726914939"] = "The latest Pandoc version was not found, installing version {0} instead."
-- Pandoc is required for Microsoft Word export.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T1473115556"] = "Pandoc is required for Microsoft Word export."
-- Pandoc Installation
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T185447014"] = "Pandoc Installation"
-- Error during Microsoft Word export
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T3290596792"] = "Error during Microsoft Word export"
-- Microsoft Word export successful
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T4256043333"] = "Microsoft Word export successful"
-- The table AUTHORS does not exist or is using an invalid syntax.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PLUGINSYSTEM::PLUGINBASE::T1068328139"] = "The table AUTHORS does not exist or is using an invalid syntax."

View File

@ -36,6 +36,9 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the AlibabaCloud HTTP chat request:
var alibabaCloudChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
@ -63,6 +66,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
}
}).ToList()],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -141,84 +145,6 @@ public sealed class ProviderAlibabaCloud() : BaseProvider("https://dashscope-int
return this.LoadModels(["text-embedding-"], token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
}
/// <inheritdoc />
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
// Qwen models:
if (modelName.StartsWith("qwen"))
{
// Check for omni models:
if (modelName.IndexOf("omni") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.AUDIO_INPUT, Capability.SPEECH_INPUT,
Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT, Capability.SPEECH_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// Check for Qwen 3:
if(modelName.StartsWith("qwen3"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.OPTIONAL_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
if(modelName.IndexOf("-vl-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
// QwQ models:
if (modelName.StartsWith("qwq"))
{
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
// QVQ models:
if (modelName.StartsWith("qvq"))
{
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.CHAT_COMPLETION_API,
];
}
// Default to text input and output:
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
#endregion

View File

@ -1,3 +1,4 @@
using System.Text.Json.Serialization;
using AIStudio.Provider.OpenAI;
namespace AIStudio.Provider.Anthropic;
@ -16,4 +17,9 @@ public readonly record struct ChatRequest(
int MaxTokens,
bool Stream,
string System
);
)
{
// Attention: The "required" modifier is not supported for [JsonExtensionData].
[JsonExtensionData]
public IDictionary<string, object> AdditionalApiParameters { get; init; } = new Dictionary<string, object>();
}
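
The [JsonExtensionData] attribute makes System.Text.Json serialize the dictionary entries as top-level properties of the request object rather than as a nested member, which is how the user's extra parameters land next to the regular fields. A minimal sketch with a hypothetical Request record, assuming the snake_case policy configured in BaseProvider:

using System;
using System.Collections.Generic;
using System.Text.Json;
using System.Text.Json.Serialization;

var request = new Request("claude-example", true)
{
    AdditionalApiParameters = new Dictionary<string, object> { ["temperature"] = 0.5 }
};
var options = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower };

// Prints {"model":"claude-example","stream":true,"temperature":0.5}; the extra
// parameter is flattened into the object, not nested under its property name:
Console.WriteLine(JsonSerializer.Serialize(request, options));

public sealed record Request(string Model, bool Stream)
{
    // Attention: The "required" modifier is not supported for [JsonExtensionData].
    [JsonExtensionData]
    public IDictionary<string, object> AdditionalApiParameters { get; init; } = new Dictionary<string, object>();
}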

View File

@ -26,6 +26,9 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
if(!requestedSecret.Success)
yield break;
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters("system");
// Prepare the Anthropic HTTP chat request:
var chatRequest = JsonSerializer.Serialize(new ChatRequest
@ -52,10 +55,11 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
}).ToList()],
System = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
MaxTokens = 4_096,
MaxTokens = apiParameters.TryGetValue("max_tokens", out var value) && value is int intValue ? intValue : 4_096,
// Right now, we only support streaming completions:
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -113,49 +117,6 @@ public sealed class ProviderAnthropic() : BaseProvider("https://api.anthropic.co
{
return Task.FromResult(Enumerable.Empty<Model>());
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
// Claude 4.x models:
if(modelName.StartsWith("claude-opus-4") || modelName.StartsWith("claude-sonnet-4"))
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.OPTIONAL_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Claude 3.7 is able to do reasoning:
if(modelName.StartsWith("claude-3-7"))
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.OPTIONAL_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// All other 3.x models are able to process text and images as input:
if(modelName.StartsWith("claude-3-"))
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Any other model is able to process text only:
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
#endregion
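
The MaxTokens line above promotes a user-supplied "max_tokens" from the parsed parameter dictionary into Anthropic's dedicated request field, keeping 4,096 as the fallback. A minimal sketch of the extraction, assuming the parser materializes JSON integers as int values:

using System;
using System.Collections.Generic;

IDictionary<string, object> apiParameters = new Dictionary<string, object>
{
    ["max_tokens"] = 2_048,
};

// Use the user's value only when it exists and has the expected type;
// otherwise keep the built-in default of 4,096 tokens:
var maxTokens = apiParameters.TryGetValue("max_tokens", out var value) && value is int intValue
    ? intValue
    : 4_096;
Console.WriteLine(maxTokens); // 2048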

View File

@ -40,7 +40,8 @@ public abstract class BaseProvider : IProvider, ISecretId
protected static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
Converters = { new AnnotationConverter() }
Converters = { new AnnotationConverter() },
AllowTrailingCommas = false
};
/// <summary>
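
With AllowTrailingCommas = false, the serializer rejects fragments that still end in a comma, which is why the provider dialog trims trailing commas from the expert input before saving. A minimal illustration, assuming plain System.Text.Json:

using System;
using System.Text.Json;

var options = new JsonSerializerOptions { AllowTrailingCommas = false };

// The trailing comma inside the object makes deserialization throw:
try
{
    JsonSerializer.Deserialize<JsonElement>("{\"temperature\": 0.5,}", options);
}
catch (JsonException ex)
{
    Console.WriteLine($"Rejected: {ex.Message}");
}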
@ -63,7 +64,10 @@ public abstract class BaseProvider : IProvider, ISecretId
/// <inheritdoc />
public abstract string InstanceName { get; set; }
/// <inheritdoc />
public string AdditionalJsonApiParameters { get; init; } = string.Empty;
/// <inheritdoc />
public abstract IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, CancellationToken token = default);
@ -78,9 +82,6 @@ public abstract class BaseProvider : IProvider, ISecretId
/// <inheritdoc />
public abstract Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default);
/// <inheritdoc />
public abstract IReadOnlyCollection<Capability> GetModelCapabilities(Model model);
#endregion
@ -129,8 +130,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if (nextResponse.StatusCode is HttpStatusCode.Forbidden)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Block, string.Format(TB("Tried to communicate with the LLM provider '{0}'. You might not be able to use this provider from your location. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError($"Failed request with status code {nextResponse.StatusCode} (message = '{nextResponse.ReasonPhrase}').");
this.logger.LogDebug($"Error body: {errorBody}");
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -138,8 +138,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.BadRequest)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The required message format might be changed. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError($"Failed request with status code {nextResponse.StatusCode} (message = '{nextResponse.ReasonPhrase}').");
this.logger.LogDebug($"Error body: {errorBody}");
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -147,8 +146,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.NotFound)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. Something was not found. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError($"Failed request with status code {nextResponse.StatusCode} (message = '{nextResponse.ReasonPhrase}').");
this.logger.LogDebug($"Error body: {errorBody}");
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -156,8 +154,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.Unauthorized)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Key, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The API key might be invalid. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError($"Failed request with status code {nextResponse.StatusCode} (message = '{nextResponse.ReasonPhrase}').");
this.logger.LogDebug($"Error body: {errorBody}");
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -165,8 +162,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.InternalServerError)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The server might be down or having issues. The provider message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError($"Failed request with status code {nextResponse.StatusCode} (message = '{nextResponse.ReasonPhrase}').");
this.logger.LogDebug($"Error body: {errorBody}");
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -174,8 +170,7 @@ public abstract class BaseProvider : IProvider, ISecretId
if(nextResponse.StatusCode is HttpStatusCode.ServiceUnavailable)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. The provider is overloaded. The message is: '{1}'"), this.InstanceName, nextResponse.ReasonPhrase)));
this.logger.LogError($"Failed request with status code {nextResponse.StatusCode} (message = '{nextResponse.ReasonPhrase}').");
this.logger.LogDebug($"Error body: {errorBody}");
this.logger.LogError("Failed request with status code {ResposeStatusCode} (message = '{ResponseReasonPhrase}', error body = '{ErrorBody}').", nextResponse.StatusCode, nextResponse.ReasonPhrase, errorBody);
errorMessage = nextResponse.ReasonPhrase;
break;
}
@ -185,13 +180,13 @@ public abstract class BaseProvider : IProvider, ISecretId
if(timeSeconds > 90)
timeSeconds = 90;
this.logger.LogDebug($"Failed request with status code {nextResponse.StatusCode} (message = '{errorMessage}'). Retrying in {timeSeconds:0.00} seconds.");
this.logger.LogDebug("Failed request with status code {ResponseStatusCode} (message = '{ErrorMessage}'). Retrying in {TimeSeconds:0.00} seconds.", nextResponse.StatusCode, errorMessage, timeSeconds);
await Task.Delay(TimeSpan.FromSeconds(timeSeconds), token);
}
if(retry >= MAX_RETRIES || !string.IsNullOrWhiteSpace(errorMessage))
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'"), this.InstanceName, MAX_RETRIES, errorMessage)));
await MessageBus.INSTANCE.SendError(new DataErrorMessage(Icons.Material.Filled.CloudOff, string.Format(TB("Tried to communicate with the LLM provider '{0}'. Even after {1} retries, there were some problems with the request. The provider message is: '{2}'."), this.InstanceName, MAX_RETRIES, errorMessage)));
return new HttpRateLimitedStreamResult(false, true, errorMessage ?? $"Failed after {MAX_RETRIES} retries; no provider message available", response);
}
@ -522,4 +517,66 @@ public abstract class BaseProvider : IProvider, ISecretId
streamReader.Dispose();
}
/// <summary>
/// Parse and convert API parameters from a provided JSON string into a dictionary,
/// optionally merging additional parameters and removing specific keys.
/// </summary>
/// <param name="keysToRemove">Optional list of keys to remove from the final dictionary
/// (case-insensitive). The parameters stream, model, and messages are removed by default.</param>
protected IDictionary<string, object> ParseAdditionalApiParameters(
params List<string> keysToRemove)
{
if(string.IsNullOrWhiteSpace(this.AdditionalJsonApiParameters))
return new Dictionary<string, object>();
try
{
// Wrap the user-provided parameters in curly brackets to form a valid JSON object:
var json = $"{{{this.AdditionalJsonApiParameters}}}";
var jsonDoc = JsonSerializer.Deserialize<JsonElement>(json, JSON_SERIALIZER_OPTIONS);
var dict = ConvertToDictionary(jsonDoc);
// Some keys are always removed because we set them:
keysToRemove.Add("stream");
keysToRemove.Add("model");
keysToRemove.Add("messages");
// Remove the specified keys (case-insensitive):
var removeSet = new HashSet<string>(keysToRemove, StringComparer.OrdinalIgnoreCase);
foreach (var key in removeSet)
dict.Remove(key);
return dict;
}
catch (JsonException ex)
{
this.logger.LogError("Failed to parse additional API parameters: {ExceptionMessage}", ex.Message);
return new Dictionary<string, object>();
}
}
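// Illustrative example (assumed values, not part of the shipped code): when AdditionalJsonApiParameters
// is set to "\"temperature\": 0.7, \"top_p\": 0.9, \"stream\": false", the wrapped JSON becomes
// {"temperature": 0.7, "top_p": 0.9, "stream": false}, and the resulting dictionary contains only
// temperature = 0.7 and top_p = 0.9, because "stream", "model", and "messages" are always stripped.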
private static IDictionary<string, object> ConvertToDictionary(JsonElement element)
{
return element.EnumerateObject()
.ToDictionary<JsonProperty, string, object>(
p => p.Name,
p => ConvertJsonValue(p.Value) ?? string.Empty
);
}
private static object? ConvertJsonValue(JsonElement element) => element.ValueKind switch
{
JsonValueKind.String => element.GetString(),
JsonValueKind.Number => element.TryGetInt32(out var i) ? i :
element.TryGetInt64(out var l) ? l :
element.TryGetDouble(out var d) ? d :
element.GetDecimal(),
JsonValueKind.True or JsonValueKind.False => element.GetBoolean(),
JsonValueKind.Null => string.Empty,
JsonValueKind.Object => ConvertToDictionary(element),
JsonValueKind.Array => element.EnumerateArray().Select(ConvertJsonValue).ToList(),
_ => string.Empty,
};
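// For illustration: a JSON value of 42 converts to an int, 0.7 to a double, nested objects to another
// IDictionary<string, object>, arrays to a list of converted values, and null to an empty string.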
}

View File

@ -36,6 +36,9 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the DeepSeek HTTP chat request:
var deepSeekChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
@ -63,6 +66,7 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
}
}).ToList()],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -108,28 +112,6 @@ public sealed class ProviderDeepSeek() : BaseProvider("https://api.deepseek.com/
return Task.FromResult(Enumerable.Empty<Model>());
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
if(modelName.IndexOf("reasoner") is not -1)
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.CHAT_COMPLETION_API,
];
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
#endregion

View File

@ -1,3 +1,5 @@
using System.Text.Json.Serialization;
namespace AIStudio.Provider.Fireworks;
/// <summary>
@ -10,4 +12,9 @@ public readonly record struct ChatRequest(
string Model,
IList<Message> Messages,
bool Stream
);
)
{
// Attention: The "required" modifier is not supported for [JsonExtensionData].
[JsonExtensionData]
public IDictionary<string, object> AdditionalApiParameters { get; init; } = new Dictionary<string, object>();
}

View File

@ -36,6 +36,9 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Fireworks HTTP chat request:
var fireworksChatRequest = JsonSerializer.Serialize(new ChatRequest
{
@ -65,6 +68,7 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
// Right now, we only support streaming completions:
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -110,7 +114,5 @@ public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/infere
return Task.FromResult(Enumerable.Empty<Model>());
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model) => CapabilitiesOpenSource.GetCapabilities(model);
#endregion
}

View File

@ -36,6 +36,9 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the GWDG HTTP chat request:
var gwdgChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
@ -63,6 +66,7 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
}
}).ToList()],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -110,8 +114,6 @@ public sealed class ProviderGWDG() : BaseProvider("https://chat-ai.academiccloud
return models.Where(model => model.Id.StartsWith("e5-", StringComparison.InvariantCultureIgnoreCase));
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model) => CapabilitiesOpenSource.GetCapabilities(model);
#endregion
private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)

View File

@ -1,3 +1,4 @@
using System.Text.Json.Serialization;
using AIStudio.Provider.OpenAI;
namespace AIStudio.Provider.Google;
@ -12,4 +13,9 @@ public readonly record struct ChatRequest(
string Model,
IList<Message> Messages,
bool Stream
);
)
{
// Attention: The "required" modifier is not supported for [JsonExtensionData].
[JsonExtensionData]
public IDictionary<string, object> AdditionalApiParameters { get; init; } = new Dictionary<string, object>();
}

View File

@ -36,6 +36,9 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Google HTTP chat request:
var geminiChatRequest = JsonSerializer.Serialize(new ChatRequest
{
@ -65,6 +68,7 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
// Right now, we only support streaming completions:
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -122,94 +126,6 @@ public class ProviderGoogle() : BaseProvider("https://generativelanguage.googlea
.Select(n => new Provider.Model(n.Name.Replace("models/", string.Empty), n.DisplayName));
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Provider.Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
if (modelName.IndexOf("gemini-") is not -1)
{
// Reasoning models:
if (modelName.IndexOf("gemini-2.5") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT, Capability.AUDIO_INPUT,
Capability.SPEECH_INPUT, Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Image generation:
if(modelName.IndexOf("-2.0-flash-preview-image-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT, Capability.AUDIO_INPUT,
Capability.SPEECH_INPUT, Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT, Capability.IMAGE_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// Realtime model:
if(modelName.IndexOf("-2.0-flash-live-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.AUDIO_INPUT, Capability.SPEECH_INPUT,
Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT, Capability.SPEECH_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// The 2.0 flash models cannot call functions:
if(modelName.IndexOf("-2.0-flash-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT, Capability.AUDIO_INPUT,
Capability.SPEECH_INPUT, Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// The old 1.0 pro vision model:
if(modelName.IndexOf("pro-vision") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// Default to all other Gemini models:
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT, Capability.AUDIO_INPUT,
Capability.SPEECH_INPUT, Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
// Default for all other models:
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
#endregion

View File

@ -1,3 +1,4 @@
using System.Text.Json.Serialization;
using AIStudio.Provider.OpenAI;
namespace AIStudio.Provider.Groq;
@ -14,4 +15,9 @@ public readonly record struct ChatRequest(
IList<Message> Messages,
bool Stream,
int Seed
);
)
{
// Attention: The "required" modifier is not supported for [JsonExtensionData].
[JsonExtensionData]
public IDictionary<string, object> AdditionalApiParameters { get; init; } = new Dictionary<string, object>();
}

View File

@ -36,6 +36,9 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the OpenAI HTTP chat request:
var groqChatRequest = JsonSerializer.Serialize(new ChatRequest
{
@ -65,6 +68,7 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
// Right now, we only support streaming completions:
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -110,8 +114,6 @@ public class ProviderGroq() : BaseProvider("https://api.groq.com/openai/v1/", LO
return Task.FromResult(Enumerable.Empty<Model>());
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model) => CapabilitiesOpenSource.GetCapabilities(model);
#endregion
private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)

View File

@ -36,6 +36,9 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Helmholtz HTTP chat request:
var helmholtzChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
@ -63,6 +66,7 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
}
}).ToList()],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -114,8 +118,6 @@ public sealed class ProviderHelmholtz() : BaseProvider("https://api.helmholtz-bl
model.Id.Contains("gritlm", StringComparison.InvariantCultureIgnoreCase));
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model) => CapabilitiesOpenSource.GetCapabilities(model);
#endregion
private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)

View File

@ -41,6 +41,9 @@ public sealed class ProviderHuggingFace : BaseProvider
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the HuggingFace HTTP chat request:
var huggingfaceChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
@ -68,6 +71,7 @@ public sealed class ProviderHuggingFace : BaseProvider
}
}).ToList()],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -113,7 +117,5 @@ public sealed class ProviderHuggingFace : BaseProvider
return Task.FromResult(Enumerable.Empty<Model>());
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model) => CapabilitiesOpenSource.GetCapabilities(model);
#endregion
}

View File

@ -19,6 +19,11 @@ public interface IProvider
/// </summary>
public string InstanceName { get; }
/// <summary>
/// The additional API parameters, given as a raw JSON fragment of key-value pairs without surrounding braces.
/// </summary>
public string AdditionalJsonApiParameters { get; }
/// <summary>
/// Starts a chat completion stream.
/// </summary>
@ -64,10 +69,4 @@ public interface IProvider
/// <returns>The list of embedding models.</returns>
public Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default);
/// <summary>
/// Get the capabilities of a model.
/// </summary>
/// <param name="model">The model to get the capabilities for.</param>
/// <returns>The capabilities of the model.</returns>
public IReadOnlyCollection<Capability> GetModelCapabilities(Model model);
}

View File

@ -144,7 +144,7 @@ public static class LLMProvidersExtensions
/// <returns>The provider instance.</returns>
public static IProvider CreateProvider(this AIStudio.Settings.Provider providerSettings)
{
return providerSettings.UsedLLMProvider.CreateProvider(providerSettings.InstanceName, providerSettings.Host, providerSettings.Hostname, providerSettings.Model, providerSettings.HFInferenceProvider);
return providerSettings.UsedLLMProvider.CreateProvider(providerSettings.InstanceName, providerSettings.Host, providerSettings.Hostname, providerSettings.Model, providerSettings.HFInferenceProvider, providerSettings.AdditionalJsonApiParameters);
}
/// <summary>
@ -157,29 +157,29 @@ public static class LLMProvidersExtensions
return embeddingProviderSettings.UsedLLMProvider.CreateProvider(embeddingProviderSettings.Name, embeddingProviderSettings.Host, embeddingProviderSettings.Hostname, embeddingProviderSettings.Model, HFInferenceProvider.NONE);
}
private static IProvider CreateProvider(this LLMProviders provider, string instanceName, Host host, string hostname, Model model, HFInferenceProvider inferenceProvider)
private static IProvider CreateProvider(this LLMProviders provider, string instanceName, Host host, string hostname, Model model, HFInferenceProvider inferenceProvider, string expertProviderApiParameter = "")
{
try
{
return provider switch
{
LLMProviders.OPEN_AI => new ProviderOpenAI { InstanceName = instanceName },
LLMProviders.ANTHROPIC => new ProviderAnthropic { InstanceName = instanceName },
LLMProviders.MISTRAL => new ProviderMistral { InstanceName = instanceName },
LLMProviders.GOOGLE => new ProviderGoogle { InstanceName = instanceName },
LLMProviders.X => new ProviderX { InstanceName = instanceName },
LLMProviders.DEEP_SEEK => new ProviderDeepSeek { InstanceName = instanceName },
LLMProviders.ALIBABA_CLOUD => new ProviderAlibabaCloud { InstanceName = instanceName },
LLMProviders.PERPLEXITY => new ProviderPerplexity { InstanceName = instanceName },
LLMProviders.OPEN_AI => new ProviderOpenAI { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.ANTHROPIC => new ProviderAnthropic { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.MISTRAL => new ProviderMistral { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.GOOGLE => new ProviderGoogle { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.X => new ProviderX { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.DEEP_SEEK => new ProviderDeepSeek { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.ALIBABA_CLOUD => new ProviderAlibabaCloud { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.PERPLEXITY => new ProviderPerplexity { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.GROQ => new ProviderGroq { InstanceName = instanceName },
LLMProviders.FIREWORKS => new ProviderFireworks { InstanceName = instanceName },
LLMProviders.HUGGINGFACE => new ProviderHuggingFace(inferenceProvider, model) { InstanceName = instanceName },
LLMProviders.GROQ => new ProviderGroq { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.FIREWORKS => new ProviderFireworks { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.HUGGINGFACE => new ProviderHuggingFace(inferenceProvider, model) { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.SELF_HOSTED => new ProviderSelfHosted(host, hostname) { InstanceName = instanceName },
LLMProviders.SELF_HOSTED => new ProviderSelfHosted(host, hostname) { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.HELMHOLTZ => new ProviderHelmholtz { InstanceName = instanceName },
LLMProviders.GWDG => new ProviderGWDG { InstanceName = instanceName },
LLMProviders.HELMHOLTZ => new ProviderHelmholtz { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.GWDG => new ProviderGWDG { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
_ => new NoProvider(),
};

View File

@ -1,3 +1,5 @@
using System.Text.Json.Serialization;
namespace AIStudio.Provider.Mistral;
/// <summary>
@ -14,4 +16,9 @@ public readonly record struct ChatRequest(
bool Stream,
int RandomSeed,
bool SafePrompt = false
);
)
{
// Attention: The "required" modifier is not supported for [JsonExtensionData].
[JsonExtensionData]
public IDictionary<string, object> AdditionalApiParameters { get; init; } = new Dictionary<string, object>();
}

View File

@ -34,6 +34,9 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Mistral HTTP chat request:
var mistralChatRequest = JsonSerializer.Serialize(new ChatRequest
{
@ -63,7 +66,8 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
// Right now, we only support streaming completions:
Stream = true,
SafePrompt = false,
SafePrompt = apiParameters.TryGetValue("safe_prompt", out var value) && value is true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -122,56 +126,6 @@ public sealed class ProviderMistral() : BaseProvider("https://api.mistral.ai/v1/
return Task.FromResult(Enumerable.Empty<Provider.Model>());
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Provider.Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
// Pixtral models are able to process images:
if (modelName.IndexOf("pixtral") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Mistral medium:
if (modelName.IndexOf("mistral-medium-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Mistral small:
if (modelName.IndexOf("mistral-small-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Mistral saba:
if (modelName.IndexOf("mistral-saba-") is not -1)
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// Default:
return CapabilitiesOpenSource.GetCapabilities(model);
}
#endregion
private async Task<ModelsResponse> LoadModelList(string? apiKeyProvisional, CancellationToken token)

View File

@ -13,6 +13,9 @@ public class NoProvider : IProvider
public string InstanceName { get; set; } = "None";
/// <inheritdoc />
public string AdditionalJsonApiParameters { get; init; } = string.Empty;
public Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult<IEnumerable<Model>>([]);
public Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult<IEnumerable<Model>>([]);

View File

@ -1,3 +1,5 @@
using System.Text.Json.Serialization;
namespace AIStudio.Provider.OpenAI;
/// <summary>
@ -15,4 +17,8 @@ public record ChatCompletionAPIRequest(
public ChatCompletionAPIRequest() : this(string.Empty, [], true)
{
}
// Attention: The "required" modifier is not supported for [JsonExtensionData].
[JsonExtensionData]
public IDictionary<string, object> AdditionalApiParameters { get; init; } = new Dictionary<string, object>();
}

View File

@ -59,7 +59,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
};
// Read the model capabilities:
var modelCapabilities = this.GetModelCapabilities(chatModel);
var modelCapabilities = ProviderExtensions.GetModelCapabilitiesOpenAI(chatModel);
// Check if we are using the Responses API or the Chat Completion API:
var usingResponsesAPI = modelCapabilities.Contains(Capability.RESPONSES_API);
@ -85,6 +85,10 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
_ => []
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters("input", "store", "tools");
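// Assumption from the request shape below: "input", "store", and "tools" are managed by the
// Responses API request itself, so any user-supplied values for them are dropped here.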
//
// Create the request: either for the Responses API or the Chat Completion API
//
@ -119,6 +123,7 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
// Right now, we only support streaming completions:
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS),
// Responses API request:
@ -157,6 +162,9 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
// Tools we want to use:
Tools = tools,
// Additional API parameters:
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS),
};
@ -215,144 +223,6 @@ public sealed class ProviderOpenAI() : BaseProvider("https://api.openai.com/v1/"
return this.LoadModels(["text-embedding-"], token, apiKeyProvisional);
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
if (modelName is "gpt-4o-search-preview")
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.WEB_SEARCH,
Capability.CHAT_COMPLETION_API,
];
if (modelName is "gpt-4o-mini-search-preview")
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.WEB_SEARCH,
Capability.CHAT_COMPLETION_API,
];
if (modelName.StartsWith("o1-mini"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.CHAT_COMPLETION_API,
];
if(modelName is "gpt-3.5-turbo")
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.RESPONSES_API,
];
if(modelName.StartsWith("gpt-3.5"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
if (modelName.StartsWith("chatgpt-4o-"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.RESPONSES_API,
];
if (modelName.StartsWith("o3-mini"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.RESPONSES_API,
];
if (modelName.StartsWith("o4-mini") || modelName.StartsWith("o3"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.WEB_SEARCH,
Capability.RESPONSES_API,
];
if (modelName.StartsWith("o1"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.RESPONSES_API,
];
if(modelName.StartsWith("gpt-4-turbo"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.RESPONSES_API,
];
if(modelName is "gpt-4" || modelName.StartsWith("gpt-4-"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.RESPONSES_API,
];
if(modelName.StartsWith("gpt-5-nano"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING, Capability.ALWAYS_REASONING,
Capability.RESPONSES_API,
];
if(modelName is "gpt-5" || modelName.StartsWith("gpt-5-"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING, Capability.ALWAYS_REASONING,
Capability.WEB_SEARCH,
Capability.RESPONSES_API,
];
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.RESPONSES_API,
];
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)

View File

@ -1,3 +1,5 @@
using System.Text.Json.Serialization;
namespace AIStudio.Provider.OpenAI;
/// <summary>
@ -18,4 +20,8 @@ public record ResponsesAPIRequest(
public ResponsesAPIRequest() : this(string.Empty, [], true, false, [])
{
}
// Attention: The "required" modifier is not supported for [JsonExtensionData].
[JsonExtensionData]
public IDictionary<string, object> AdditionalApiParameters { get; init; } = new Dictionary<string, object>();
}

View File

@ -45,6 +45,9 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the Perplexity HTTP chat request:
var perplexityChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
@ -72,6 +75,7 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
}
}).ToList()],
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -117,38 +121,6 @@ public sealed class ProviderPerplexity() : BaseProvider("https://api.perplexity.
return Task.FromResult(Enumerable.Empty<Model>());
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
if(modelName.IndexOf("reasoning") is not -1 ||
modelName.IndexOf("deep-research") is not -1)
return
[
Capability.TEXT_INPUT,
Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.IMAGE_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.WEB_SEARCH,
Capability.CHAT_COMPLETION_API,
];
return
[
Capability.TEXT_INPUT,
Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.IMAGE_OUTPUT,
Capability.WEB_SEARCH,
Capability.CHAT_COMPLETION_API,
];
}
#endregion
private Task<IEnumerable<Model>> LoadModels() => Task.FromResult<IEnumerable<Model>>(KNOWN_MODELS);

View File

@ -1,3 +1,5 @@
using System.Text.Json.Serialization;
namespace AIStudio.Provider.SelfHosted;
/// <summary>
@ -10,4 +12,9 @@ public readonly record struct ChatRequest(
string Model,
IList<Message> Messages,
bool Stream
);
)
{
// Attention: The "required" modifier is not supported for [JsonExtensionData].
[JsonExtensionData]
public IDictionary<string, object> AdditionalApiParameters { get; init; } = new Dictionary<string, object>();
}

View File

@ -32,6 +32,9 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the OpenAI HTTP chat request:
var providerChatRequest = JsonSerializer.Serialize(new ChatRequest
{
@ -60,7 +63,8 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
}).ToList()],
// Right now, we only support streaming completions:
Stream = true
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -142,8 +146,6 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
}
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Provider.Model model) => CapabilitiesOpenSource.GetCapabilities(model);
#endregion
private async Task<IEnumerable<Provider.Model>> LoadModels(string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null)

View File

@ -36,6 +36,9 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Prepare the xAI HTTP chat request:
var xChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
@ -65,6 +68,7 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
// Right now, we only support streaming completions:
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
async Task<HttpRequestMessage> RequestBuilder()
@ -111,8 +115,6 @@ public sealed class ProviderX() : BaseProvider("https://api.x.ai/v1/", LOGGER)
return Task.FromResult<IEnumerable<Model>>([]);
}
public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model) => CapabilitiesOpenSource.GetCapabilities(model);
#endregion
private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)

View File

@ -70,7 +70,7 @@ public sealed class DataApp(Expression<Func<Data, DataApp>>? configSelection = n
/// <summary>
/// Should we preselect a profile for the entire app?
/// </summary>
public string PreselectedProfile { get; set; } = string.Empty;
public string PreselectedProfile { get; set; } = ManagedConfiguration.Register(configSelection, n => n.PreselectedProfile, string.Empty);
/// <summary>
/// Should we preselect a chat template for the entire app?

View File

@ -1,11 +1,25 @@
using AIStudio.Tools.PluginSystem;
using Lua;
namespace AIStudio.Settings;
public readonly record struct Profile(uint Num, string Id, string Name, string NeedToKnow, string Actions)
public record Profile(
uint Num,
string Id,
string Name,
string NeedToKnow,
string Actions,
bool IsEnterpriseConfiguration = false,
Guid EnterpriseConfigurationPluginId = default): ConfigurationBaseObject
{
public Profile() : this(0, Guid.Empty.ToString(), string.Empty, string.Empty, string.Empty)
{
}
private static string TB(string fallbackEN) => I18N.I.T(fallbackEN, typeof(Profile).Namespace, nameof(Profile));
private static readonly ILogger<Profile> LOGGER = Program.LOGGER_FACTORY.CreateLogger<Profile>();
public static readonly Profile NO_PROFILE = new()
{
Name = TB("Use no profile"),
@ -60,4 +74,46 @@ public readonly record struct Profile(uint Num, string Id, string Name, string N
{actions}
""";
}
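// Expected shape of a configured profile table, inferred from the parsing below (illustrative):
// { Id = "<GUID>", Name = "...", NeedToKnow = "...", Actions = "..." }, all values given as strings.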
public static bool TryParseProfileTable(int idx, LuaTable table, Guid configPluginId, out ConfigurationBaseObject template)
{
LOGGER.LogInformation($"\n Profile table parsing {idx}.\n");
template = NO_PROFILE;
if (!table.TryGetValue("Id", out var idValue) || !idValue.TryRead<string>(out var idText) || !Guid.TryParse(idText, out var id))
{
LOGGER.LogWarning($"The configured profile {idx} does not contain a valid ID. The ID must be a valid GUID.");
return false;
}
if (!table.TryGetValue("Name", out var nameValue) || !nameValue.TryRead<string>(out var name))
{
LOGGER.LogWarning($"The configured profile {idx} does not contain a valid name.");
return false;
}
if (!table.TryGetValue("NeedToKnow", out var needToKnowValue) || !needToKnowValue.TryRead<string>(out var needToKnow))
{
LOGGER.LogWarning($"The configured profile {idx} does not contain valid NeedToKnow data.");
return false;
}
if (!table.TryGetValue("Actions", out var actionsValue) || !actionsValue.TryRead<string>(out var actions))
{
LOGGER.LogWarning($"The configured profile {idx} does not contain valid actions data.");
return false;
}
template = new Profile
{
Num = 0,
Id = id.ToString(),
Name = name,
NeedToKnow = needToKnow,
Actions = actions,
IsEnterpriseConfiguration = true,
EnterpriseConfigurationPluginId = configPluginId,
};
return true;
}
}

View File

@ -31,7 +31,8 @@ public sealed record Provider(
Guid EnterpriseConfigurationPluginId = default,
string Hostname = "http://localhost:1234",
Host Host = Host.NONE,
HFInferenceProvider HFInferenceProvider = HFInferenceProvider.NONE) : ConfigurationBaseObject, ISecretId
HFInferenceProvider HFInferenceProvider = HFInferenceProvider.NONE,
string AdditionalJsonApiParameters = "") : ConfigurationBaseObject, ISecretId
{
private static readonly ILogger<Provider> LOGGER = Program.LOGGER_FACTORY.CreateLogger<Provider>();
@ -132,6 +133,12 @@ public sealed record Provider(
LOGGER.LogWarning($"The configured provider {idx} does not contain a valid model configuration.");
return false;
}
if (!table.TryGetValue("AdditionalJsonApiParameters", out var additionalJsonApiParametersValue) || !additionalJsonApiParametersValue.TryRead<string>(out var additionalJsonApiParameters))
{
LOGGER.LogWarning($"The configured provider {idx} does not contain valid additional JSON API parameters.");
return false;
}
provider = new Provider
{
@ -144,7 +151,8 @@ public sealed record Provider(
IsEnterpriseConfiguration = true,
EnterpriseConfigurationPluginId = configPluginId,
Hostname = hostname,
Host = host
Host = host,
AdditionalJsonApiParameters = additionalJsonApiParameters,
};
return true;

View File

@ -0,0 +1,84 @@
using AIStudio.Provider;
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static List<Capability> GetModelCapabilitiesAlibaba(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
// Qwen models:
if (modelName.StartsWith("qwen"))
{
// Check for omni models:
if (modelName.IndexOf("omni") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.AUDIO_INPUT, Capability.SPEECH_INPUT,
Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT, Capability.SPEECH_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// Check for Qwen 3:
if(modelName.StartsWith("qwen3"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.OPTIONAL_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
if(modelName.IndexOf("-vl-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
// QwQ models:
if (modelName.StartsWith("qwq"))
{
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
// QVQ models:
if (modelName.StartsWith("qvq"))
{
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.CHAT_COMPLETION_API,
];
}
// Default to text input and output:
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
}

View File

@ -0,0 +1,49 @@
using AIStudio.Provider;
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static List<Capability> GetModelCapabilitiesAnthropic(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
// Claude 4.x models:
if(modelName.StartsWith("claude-opus-4") || modelName.StartsWith("claude-sonnet-4"))
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.OPTIONAL_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Claude 3.7 is able to do reasoning:
if(modelName.StartsWith("claude-3-7"))
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.OPTIONAL_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// All other 3.x models are able to process text and images as input:
if(modelName.StartsWith("claude-3-"))
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Any other model is able to process text only:
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
}

View File

@ -0,0 +1,28 @@
using AIStudio.Provider;
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static List<Capability> GetModelCapabilitiesDeepSeek(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
if(modelName.IndexOf("reasoner") is not -1)
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.CHAT_COMPLETION_API,
];
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
}

View File

@ -0,0 +1,95 @@
using AIStudio.Provider;
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static List<Capability> GetModelCapabilitiesGoogle(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
if (modelName.IndexOf("gemini-") is not -1)
{
// Reasoning models:
if (modelName.IndexOf("gemini-2.5") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT, Capability.AUDIO_INPUT,
Capability.SPEECH_INPUT, Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Image generation:
if(modelName.IndexOf("-2.0-flash-preview-image-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT, Capability.AUDIO_INPUT,
Capability.SPEECH_INPUT, Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT, Capability.IMAGE_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// Realtime model:
if(modelName.IndexOf("-2.0-flash-live-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.AUDIO_INPUT, Capability.SPEECH_INPUT,
Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT, Capability.SPEECH_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// The 2.0 flash models cannot call functions:
if(modelName.IndexOf("-2.0-flash-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT, Capability.AUDIO_INPUT,
Capability.SPEECH_INPUT, Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// The old 1.0 pro vision model:
if(modelName.IndexOf("pro-vision") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// Default to all other Gemini models:
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT, Capability.AUDIO_INPUT,
Capability.SPEECH_INPUT, Capability.VIDEO_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
// Default for all other models:
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
}

View File

@ -0,0 +1,56 @@
using AIStudio.Provider;
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static List<Capability> GetModelCapabilitiesMistral(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
// Pixtral models are able to process images:
if (modelName.IndexOf("pixtral") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Mistral medium:
if (modelName.IndexOf("mistral-medium-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Mistral small:
if (modelName.IndexOf("mistral-small-") is not -1)
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Mistral saba:
if (modelName.IndexOf("mistral-saba-") is not -1)
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// Default:
return GetModelCapabilitiesOpenSource(model);
}
}

View File

@ -0,0 +1,155 @@
using AIStudio.Provider;
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static List<Capability> GetModelCapabilitiesOpenAI(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
if (modelName is "gpt-4o-search-preview")
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.WEB_SEARCH,
Capability.CHAT_COMPLETION_API,
];
if (modelName is "gpt-4o-mini-search-preview")
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.WEB_SEARCH,
Capability.CHAT_COMPLETION_API,
];
if (modelName.StartsWith("o1-mini"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.CHAT_COMPLETION_API,
];
if(modelName is "gpt-3.5-turbo")
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.RESPONSES_API,
];
if(modelName.StartsWith("gpt-3.5"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
if (modelName.StartsWith("chatgpt-4o-"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.RESPONSES_API,
];
if (modelName.StartsWith("o3-mini"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.RESPONSES_API,
];
if (modelName.StartsWith("o4-mini") || modelName.StartsWith("o3"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.WEB_SEARCH,
Capability.RESPONSES_API,
];
if (modelName.StartsWith("o1"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING, Capability.FUNCTION_CALLING,
Capability.RESPONSES_API,
];
if(modelName.StartsWith("gpt-4-turbo"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.RESPONSES_API,
];
if(modelName is "gpt-4" || modelName.StartsWith("gpt-4-"))
return
[
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.RESPONSES_API,
];
if(modelName.StartsWith("gpt-5-nano"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING, Capability.ALWAYS_REASONING,
Capability.RESPONSES_API,
];
if(modelName is "gpt-5" || modelName.StartsWith("gpt-5-"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING, Capability.ALWAYS_REASONING,
Capability.WEB_SEARCH,
Capability.RESPONSES_API,
];
if(modelName is "gpt-5.1" || modelName.StartsWith("gpt-5.1-"))
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT, Capability.IMAGE_OUTPUT,
Capability.FUNCTION_CALLING, Capability.OPTIONAL_REASONING,
Capability.WEB_SEARCH,
Capability.RESPONSES_API, Capability.CHAT_COMPLETION_API,
];
return
[
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.RESPONSES_API,
];
}
}

View File

@ -1,8 +1,10 @@
namespace AIStudio.Provider;
using AIStudio.Provider;
public static class CapabilitiesOpenSource
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static IReadOnlyCollection<Capability> GetCapabilities(Model model)
public static List<Capability> GetModelCapabilitiesOpenSource(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();

View File

@ -0,0 +1,38 @@
using AIStudio.Provider;
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static List<Capability> GetModelCapabilitiesPerplexity(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
if(modelName.IndexOf("reasoning") is not -1 ||
modelName.IndexOf("deep-research") is not -1)
return
[
Capability.TEXT_INPUT,
Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.IMAGE_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.WEB_SEARCH,
Capability.CHAT_COMPLETION_API,
];
return
[
Capability.TEXT_INPUT,
Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.IMAGE_OUTPUT,
Capability.WEB_SEARCH,
Capability.CHAT_COMPLETION_API,
];
}
}

View File

@ -0,0 +1,29 @@
using AIStudio.Provider;
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static List<Capability> GetModelCapabilities(this Provider provider) => provider.UsedLLMProvider switch
{
LLMProviders.OPEN_AI => GetModelCapabilitiesOpenAI(provider.Model),
LLMProviders.MISTRAL => GetModelCapabilitiesMistral(provider.Model),
LLMProviders.ANTHROPIC => GetModelCapabilitiesAnthropic(provider.Model),
LLMProviders.GOOGLE => GetModelCapabilitiesGoogle(provider.Model),
LLMProviders.X => GetModelCapabilitiesOpenSource(provider.Model),
LLMProviders.DEEP_SEEK => GetModelCapabilitiesDeepSeek(provider.Model),
LLMProviders.ALIBABA_CLOUD => GetModelCapabilitiesAlibaba(provider.Model),
LLMProviders.PERPLEXITY => GetModelCapabilitiesPerplexity(provider.Model),
LLMProviders.GROQ => GetModelCapabilitiesOpenSource(provider.Model),
LLMProviders.FIREWORKS => GetModelCapabilitiesOpenSource(provider.Model),
LLMProviders.HUGGINGFACE => GetModelCapabilitiesOpenSource(provider.Model),
LLMProviders.HELMHOLTZ => GetModelCapabilitiesOpenSource(provider.Model),
LLMProviders.GWDG => GetModelCapabilitiesOpenSource(provider.Model),
LLMProviders.SELF_HOSTED => GetModelCapabilitiesOpenSource(provider.Model),
_ => []
};
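// Illustrative usage (assumed call site): given a configured Settings.Provider instance named
// selectedProvider, calling selectedProvider.GetModelCapabilities() yields the capability list
// for its currently selected model.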
}

View File

@ -260,11 +260,11 @@ public sealed class SettingsManager
public Profile GetPreselectedProfile(Tools.Components component)
{
var preselection = component.PreselectedProfile(this);
if (preselection != default)
if (preselection != Profile.NO_PROFILE)
return preselection;
preselection = this.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == this.ConfigurationData.App.PreselectedProfile);
return preselection != default ? preselection : Profile.NO_PROFILE;
return preselection ?? Profile.NO_PROFILE;
}
public ChatTemplate GetPreselectedChatTemplate(Tools.Components component)

View File

@ -131,20 +131,19 @@ public static class ComponentsExtensions
public static Profile PreselectedProfile(this Components component, SettingsManager settingsManager) => component switch
{
Components.AGENDA_ASSISTANT => settingsManager.ConfigurationData.Agenda.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.Agenda.PreselectedProfile) : default,
Components.CODING_ASSISTANT => settingsManager.ConfigurationData.Coding.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.Coding.PreselectedProfile) : default,
Components.EMAIL_ASSISTANT => settingsManager.ConfigurationData.EMail.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.EMail.PreselectedProfile) : default,
Components.LEGAL_CHECK_ASSISTANT => settingsManager.ConfigurationData.LegalCheck.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.LegalCheck.PreselectedProfile) : default,
Components.MY_TASKS_ASSISTANT => settingsManager.ConfigurationData.MyTasks.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.MyTasks.PreselectedProfile) : default,
Components.BIAS_DAY_ASSISTANT => settingsManager.ConfigurationData.BiasOfTheDay.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.BiasOfTheDay.PreselectedProfile) : default,
Components.ERI_ASSISTANT => settingsManager.ConfigurationData.ERI.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.ERI.PreselectedProfile) : default,
#warning Add preselected profile for DOCUMENT_ANALYSIS_ASSISTANT:
// Components.DOCUMENT_ANALYSIS_ASSISTANT => settingsManager.ConfigurationData.DocumentAnalysis.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.DocumentAnalysis.PreselectedProfile) : default,
Components.AGENDA_ASSISTANT => settingsManager.ConfigurationData.Agenda.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.Agenda.PreselectedProfile) ?? Profile.NO_PROFILE : Profile.NO_PROFILE,
Components.CODING_ASSISTANT => settingsManager.ConfigurationData.Coding.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.Coding.PreselectedProfile) ?? Profile.NO_PROFILE : Profile.NO_PROFILE,
Components.EMAIL_ASSISTANT => settingsManager.ConfigurationData.EMail.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.EMail.PreselectedProfile) ?? Profile.NO_PROFILE : Profile.NO_PROFILE,
Components.LEGAL_CHECK_ASSISTANT => settingsManager.ConfigurationData.LegalCheck.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.LegalCheck.PreselectedProfile) ?? Profile.NO_PROFILE : Profile.NO_PROFILE,
Components.MY_TASKS_ASSISTANT => settingsManager.ConfigurationData.MyTasks.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.MyTasks.PreselectedProfile) ?? Profile.NO_PROFILE : Profile.NO_PROFILE,
Components.BIAS_DAY_ASSISTANT => settingsManager.ConfigurationData.BiasOfTheDay.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.BiasOfTheDay.PreselectedProfile) ?? Profile.NO_PROFILE : Profile.NO_PROFILE,
Components.ERI_ASSISTANT => settingsManager.ConfigurationData.ERI.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.ERI.PreselectedProfile) ?? Profile.NO_PROFILE : Profile.NO_PROFILE,
Components.CHAT => settingsManager.ConfigurationData.Chat.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.Chat.PreselectedProfile) : default,
Components.CHAT => settingsManager.ConfigurationData.Chat.PreselectOptions ? settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == settingsManager.ConfigurationData.Chat.PreselectedProfile) ?? Profile.NO_PROFILE : Profile.NO_PROFILE,
_ => default,
_ => Profile.NO_PROFILE,
};
public static ChatTemplate PreselectedChatTemplate(this Components component, SettingsManager settingsManager) => component switch

View File

@ -37,8 +37,9 @@ public static partial class Pandoc
/// </summary>
/// <param name="rustService">Global rust service to access file system and data dir.</param>
/// <param name="showMessages">Controls if snackbars are shown to the user.</param>
/// <param name="showSuccessMessage">Controls if a success snackbar is shown to the user.</param>
/// <returns>True, if pandoc is available and the minimum required version is met, else false.</returns>
public static async Task<PandocInstallation> CheckAvailabilityAsync(RustService rustService, bool showMessages = true)
public static async Task<PandocInstallation> CheckAvailabilityAsync(RustService rustService, bool showMessages = true, bool showSuccessMessage = true)
{
try
{
@ -80,7 +81,7 @@ public static partial class Pandoc
if (installedVersion >= MINIMUM_REQUIRED_VERSION)
{
if (showMessages)
if (showMessages && showSuccessMessage)
await MessageBus.INSTANCE.SendSuccess(new(Icons.Material.Filled.CheckCircle, string.Format(TB("Pandoc v{0} is installed."), installedVersionString)));
LOG.LogInformation("Pandoc v{0} is installed and matches the required version (v{1})", installedVersionString, MINIMUM_REQUIRED_VERSION.ToString());

View File

@ -0,0 +1,116 @@
using System.Diagnostics;
using AIStudio.Chat;
using AIStudio.Dialogs;
using AIStudio.Tools.PluginSystem;
using AIStudio.Tools.Services;
using DialogOptions = AIStudio.Dialogs.DialogOptions;
namespace AIStudio.Tools;
public static class PandocExport
{
private static readonly ILogger LOGGER = Program.LOGGER_FACTORY.CreateLogger(nameof(PandocExport));
private static string TB(string fallbackEn) => I18N.I.T(fallbackEn, typeof(PandocExport).Namespace, nameof(PandocExport));
public static async Task<bool> ToMicrosoftWord(RustService rustService, IDialogService dialogService, string dialogTitle, IContent markdownContent)
{
var response = await rustService.SaveFile(dialogTitle, new("Microsoft Word", ["docx"]));
if (response.UserCancelled)
{
LOGGER.LogInformation("User cancelled the save dialog.");
return false;
}
LOGGER.LogInformation($"The user chose the path '{response.SaveFilePath}' for the Microsoft Word export.");
var tempMarkdownFile = Guid.NewGuid().ToString();
var tempMarkdownFilePath = Path.Combine(Path.GetTempPath(), tempMarkdownFile);
try
{
// Extract text content from chat:
var markdownText = markdownContent switch
{
ContentText text => text.Text,
ContentImage _ => "Image export to Microsoft Word not yet possible",
_ => "Unknown content type. Cannot export to Word."
};
// Write text content to a temporary file:
await File.WriteAllTextAsync(tempMarkdownFilePath, markdownText);
// Ensure that Pandoc is installed and ready:
var pandocState = await Pandoc.CheckAvailabilityAsync(rustService, showSuccessMessage: false);
if (!pandocState.IsAvailable)
{
var dialogParameters = new DialogParameters<PandocDialog>
{
{ x => x.ShowInitialResultInSnackbar, false },
};
var dialogReference = await dialogService.ShowAsync<PandocDialog>(TB("Pandoc Installation"), dialogParameters, DialogOptions.FULLSCREEN);
await dialogReference.Result;
pandocState = await Pandoc.CheckAvailabilityAsync(rustService, showSuccessMessage: true);
if (!pandocState.IsAvailable)
{
LOGGER.LogError("Pandoc is not available after installation attempt.");
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Cancel, TB("Pandoc is required for Microsoft Word export.")));
return false;
}
}
// Call Pandoc to create the Word file:
var pandoc = await PandocProcessBuilder
.Create()
.UseStandaloneMode()
.WithInputFormat("markdown")
.WithOutputFormat("docx")
.WithOutputFile(response.SaveFilePath)
.WithInputFile(tempMarkdownFilePath)
.BuildAsync(rustService);
using var process = Process.Start(pandoc.StartInfo);
if (process is null)
{
LOGGER.LogError("Failed to start Pandoc process.");
return false;
}
// Read stderr before awaiting exit so a full pipe buffer cannot deadlock Pandoc:
var error = await process.StandardError.ReadToEndAsync();
await process.WaitForExitAsync();
if (process.ExitCode is not 0)
{
LOGGER.LogError("Pandoc failed with exit code {0}: {1}", process.ExitCode, error);
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Cancel, TB("Error during Microsoft Word export")));
return false;
}
LOGGER.LogInformation("Pandoc conversion successful.");
await MessageBus.INSTANCE.SendSuccess(new(Icons.Material.Filled.CheckCircle, TB("Microsoft Word export successful")));
return true;
}
catch (Exception ex)
{
LOGGER.LogError(ex, "Error during Word export.");
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Cancel, TB("Error during Microsoft Word export")));
return false;
}
finally
{
// Try to remove the temp file:
try
{
File.Delete(tempMarkdownFilePath);
}
catch
{
LOGGER.LogWarning("Was not able to delete temporary file: '{0}'", tempMarkdownFilePath);
}
}
}
}
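
A minimal call-site sketch (hypothetical: the component, its injected services, and the content variable are assumptions; only the method signature is taken from the class above):

// Inside a Blazor component with RustService and IDialogService injected:
var success = await PandocExport.ToMicrosoftWord(
this.RustService,
this.DialogService,
"Save as Word file", // illustrative dialog title
contentBlock); // any IContent, e.g. a ContentText block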

View File

@@ -19,6 +19,7 @@ public sealed class PandocProcessBuilder
private string? providedOutputFile;
private string? providedInputFormat;
private string? providedOutputFormat;
private bool useStandaloneMode;
private readonly List<string> additionalArguments = new();
@@ -57,10 +58,19 @@ public sealed class PandocProcessBuilder
this.additionalArguments.Add(argument);
return this;
}
public PandocProcessBuilder UseStandaloneMode()
{
this.useStandaloneMode = true;
return this;
}
public async Task<PandocPreparedProcess> BuildAsync(RustService rustService)
{
var sbArguments = new StringBuilder();
if (this.useStandaloneMode)
sbArguments.Append(" --standalone ");
if(!string.IsNullOrWhiteSpace(this.providedInputFile))
sbArguments.Append(this.providedInputFile);
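
A hedged sketch of the builder with the new standalone flag (file names are illustrative; the call chain mirrors the export code above):

var prepared = await PandocProcessBuilder
.Create()
.UseStandaloneMode() // causes BuildAsync to append " --standalone "
.WithInputFormat("markdown")
.WithOutputFormat("docx")
.WithOutputFile("notes.docx")
.WithInputFile("notes.md")
.BuildAsync(rustService);
using var process = Process.Start(prepared.StartInfo);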

View File

@@ -73,6 +73,12 @@ public sealed class PluginConfiguration(bool isInternal, LuaState state, PluginT
// Handle configured chat templates:
PluginConfigurationObject.TryParse(PluginConfigurationObjectType.CHAT_TEMPLATE, x => x.ChatTemplates, x => x.NextChatTemplateNum, mainTable, this.Id, ref this.configObjects, dryRun);
// Handle configured profiles:
PluginConfigurationObject.TryParse(PluginConfigurationObjectType.PROFILE, x => x.Profiles, x => x.NextProfileNum, mainTable, this.Id, ref this.configObjects, dryRun);
// Config: preselected profile?
ManagedConfiguration.TryProcessConfiguration(x => x.App, x => x.PreselectedProfile, this.Id, settingsTable, dryRun);
message = string.Empty;
return true;
}

View File

@@ -101,7 +101,8 @@ public sealed record PluginConfigurationObject
{
PluginConfigurationObjectType.LLM_PROVIDER => (Settings.Provider.TryParseProviderTable(i, luaObjectTable, configPluginId, out var configurationObject) && configurationObject != Settings.Provider.NONE, configurationObject),
PluginConfigurationObjectType.CHAT_TEMPLATE => (ChatTemplate.TryParseChatTemplateTable(i, luaObjectTable, configPluginId, out var configurationObject) && configurationObject != ChatTemplate.NO_CHAT_TEMPLATE, configurationObject),
PluginConfigurationObjectType.PROFILE => (Profile.TryParseProfileTable(i, luaObjectTable, configPluginId, out var configurationObject) && configurationObject != Profile.NO_PROFILE, configurationObject),
_ => (false, NoConfigurationObject.INSTANCE)
};

View File

@@ -137,6 +137,14 @@ public static partial class PluginFactory
if(PluginConfigurationObject.CleanLeftOverConfigurationObjects(PluginConfigurationObjectType.CHAT_TEMPLATE, x => x.ChatTemplates, AVAILABLE_PLUGINS, configObjectList))
wasConfigurationChanged = true;
// Check profiles:
if(PluginConfigurationObject.CleanLeftOverConfigurationObjects(PluginConfigurationObjectType.PROFILE, x => x.Profiles, AVAILABLE_PLUGINS, configObjectList))
wasConfigurationChanged = true;
// Check for a preselected profile:
if(ManagedConfiguration.IsConfigurationLeftOver(x => x.App, x => x.PreselectedProfile, AVAILABLE_PLUGINS))
wasConfigurationChanged = true;
// Check for the update interval:
if(ManagedConfiguration.IsConfigurationLeftOver(x => x.App, x => x.UpdateInterval, AVAILABLE_PLUGINS))
wasConfigurationChanged = true;

View File

@@ -0,0 +1,3 @@
namespace AIStudio.Tools.Rust;
public readonly record struct FileSaveResponse(bool UserCancelled, string SaveFilePath);

View File

@@ -0,0 +1,10 @@
namespace AIStudio.Tools.Rust;
public class SaveFileOptions
{
public required string Title { get; init; }
public PreviousFile? PreviousFile { get; init; }
public FileTypeFilter? Filter { get; init; }
}

View File

@@ -35,4 +35,31 @@ public sealed partial class RustService
return await result.Content.ReadFromJsonAsync<FileSelectionResponse>(this.jsonRustSerializerOptions);
}
/// <summary>
/// Initiates a dialog to let the user select a file for a writing operation.
/// </summary>
/// <param name="title">The title of the save file dialog.</param>
/// <param name="filter">An optional file type filter for filtering specific file formats.</param>
/// <param name="initialFile">An optional initial file path to pre-fill in the dialog.</param>
/// <returns>A <see cref="FileSaveResponse"/> object indicating whether the user cancelled the
/// operation and, if not, which file path was chosen.</returns>
public async Task<FileSaveResponse> SaveFile(string title, FileTypeFilter? filter = null, string? initialFile = null)
{
var payload = new SaveFileOptions
{
Title = title,
PreviousFile = initialFile is null ? null : new (initialFile),
Filter = filter
};
var result = await this.http.PostAsJsonAsync("/save/file", payload, this.jsonRustSerializerOptions);
if (!result.IsSuccessStatusCode)
{
this.logger!.LogError("Failed to select a file for a writing operation: '{0}'", result.StatusCode);
return new FileSaveResponse(true, string.Empty);
}
return await result.Content.ReadFromJsonAsync<FileSaveResponse>(this.jsonRustSerializerOptions);
}
}
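
A hedged usage sketch of the new method (the dialog title is illustrative, and 'markdownText' stands in for whatever content should be written):

var response = await rustService.SaveFile("Save as Word file", new FileTypeFilter("Microsoft Word", ["docx"]));
if (!response.UserCancelled)
await File.WriteAllTextAsync(response.SaveFilePath, markdownText);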

View File

@@ -4,23 +4,23 @@
"net9.0": {
"CodeBeam.MudBlazor.Extensions": {
"type": "Direct",
"requested": "[8.2.4, )",
"resolved": "8.2.4",
"contentHash": "IaQoIcREfkHq8VUxFDZQrK69blNw+0FwTjC8JGHhhSGugJZ3UFOjhpYYAhiU/1Eue3PXySLXzJFXzsD/hIArVw==",
"requested": "[8.2.5, )",
"resolved": "8.2.5",
"contentHash": "zZ2zFQeGAqrT0rCE8ZlfnchBUk8IEwFVgZ2mWVHy8EfAQHvgUXHvc6l/t51n1Wx9DMP8beWRDTM6nO1kfYAXZg==",
"dependencies": {
"BuildBundlerMinifier": "3.2.449",
"CsvHelper": "33.0.1",
"Microsoft.AspNetCore.Components": "9.0.7",
"Microsoft.AspNetCore.Components.Web": "9.0.7",
"Microsoft.AspNetCore.Components": "9.0.10",
"Microsoft.AspNetCore.Components.Web": "9.0.10",
"MudBlazor": "8.0.0",
"ZXing.Net": "0.16.9"
}
},
"HtmlAgilityPack": {
"type": "Direct",
"requested": "[1.12.2, )",
"resolved": "1.12.2",
"contentHash": "btF/9sB65h0V9ipZxVfEQ9fxDwXSFRwhi4Z1qFBgnXONqWVKZE3LxS0JEMW73G3gvrFI7/IAqLA1y/15HDa3fw=="
"requested": "[1.12.4, )",
"resolved": "1.12.4",
"contentHash": "ljqvBabvFwKoLniuoQKO8b5bJfJweKLs4fUNS/V5dsvpo0A8MlJqxxn9XVmP2DaskbUXty6IYaWAi1SArGIMeQ=="
},
"LuaCSharp": {
"type": "Direct",
@@ -30,11 +30,11 @@
},
"Microsoft.Extensions.FileProviders.Embedded": {
"type": "Direct",
"requested": "[9.0.8, )",
"resolved": "9.0.8",
"contentHash": "LVm1o08C5eSB+WvhOEtp/cm/I3s7bCoYisTBYKgR3KEfHXkKfvkLPXfg4CNHYkxPqZ4T/XuGENDLtbhDywGFHA==",
"requested": "[9.0.11, )",
"resolved": "9.0.11",
"contentHash": "XIrEYbuRq+mam7ljrxf/S4Ug5taFXDNUVGK+rxqx5qZbM572hLBzeS6ClNGy97kJQC5urlApTv6Xprl+xvp6oA==",
"dependencies": {
"Microsoft.Extensions.FileProviders.Abstractions": "9.0.8"
"Microsoft.Extensions.FileProviders.Abstractions": "9.0.11"
}
},
"Microsoft.NET.ILLink.Tasks": {
@@ -66,9 +66,9 @@
},
"ReverseMarkdown": {
"type": "Direct",
"requested": "[4.7.0, )",
"resolved": "4.7.0",
"contentHash": "RM5i+RoCG+9Vc897SyjGe2qQ6FRYitU+JFjc4ZAQWQCN/8R1uOSO4B8DdAtBEtRhJcZfIEIqshe2MoLDChyExw==",
"requested": "[4.7.1, )",
"resolved": "4.7.1",
"contentHash": "Tz8yJXg8J1O9xJn6fXzjeEcLTYjfwbauCGwK4f/dpxOOpo3iniXIggmRlJ7F91yHJPB0Gm4lk1/qV0Jxip4a8A==",
"dependencies": {
"HtmlAgilityPack": "1.12.1"
}
@@ -90,72 +90,72 @@
},
"Microsoft.AspNetCore.Authorization": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "P0Gej6X5cEoK+sS9XpgYSzg0Nz8OOlvfQb12aOAJW/P4b9nAzLQCVoNp1GDyR/P8eMSnoPARiKPaa6q51iR0oA==",
"resolved": "9.0.10",
"contentHash": "odY40/4vXt1tHeuc89zjEPfx0i0c2jurKW9r884v92i6BGasJkCKTtnIGIREBqnTn+HB4uZLipOdWG/GczQwnQ==",
"dependencies": {
"Microsoft.AspNetCore.Metadata": "9.0.7",
"Microsoft.Extensions.Logging.Abstractions": "9.0.7",
"Microsoft.Extensions.Options": "9.0.7"
"Microsoft.AspNetCore.Metadata": "9.0.10",
"Microsoft.Extensions.Logging.Abstractions": "9.0.10",
"Microsoft.Extensions.Options": "9.0.10"
}
},
"Microsoft.AspNetCore.Components": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "cZpVsxWWGagoP2U6Kjqm107gVZHTmiM2m7YDNRsScTWoBB1iyEIznvYG9ZK4XkDY4yDUTdnZrXRMMVu8K7dJ8Q==",
"resolved": "9.0.10",
"contentHash": "yodHFmpceXlUrWJ53OgzWyoZWvxNFtz8pGAeDXYenZau1UD5nR2uNGMt1QeeA/3LwysnR1JehndthS587P5GrQ==",
"dependencies": {
"Microsoft.AspNetCore.Authorization": "9.0.7",
"Microsoft.AspNetCore.Components.Analyzers": "9.0.7"
"Microsoft.AspNetCore.Authorization": "9.0.10",
"Microsoft.AspNetCore.Components.Analyzers": "9.0.10"
}
},
"Microsoft.AspNetCore.Components.Analyzers": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "SlMcfUJHFxjIFAecPY55in8u93AZo5NQrRlPY3hKrSsLEgyjjtZGzWIn+F9RluHw5wRct/QFRCt2sQwEhn8qtA=="
"resolved": "9.0.10",
"contentHash": "VWSbgP3XaUYdt8JwUOOyx64JI6dLgClYRyiJ6+XcuC/2OW0eKSee3VJwq/1jutZqkAzyBjQ/gpDw7BXmbhrlVA=="
},
"Microsoft.AspNetCore.Components.Forms": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "ecnFWXV/ClmBfkevmalj1e1+T00AkihOyK8yQdKOwKmibraYphyup4BdOLP7v17PNVF4d5njsoHmFtVBvYpsJg==",
"resolved": "9.0.10",
"contentHash": "wVyZxxu8C/P0h4QifYEsVJ4AGWOd9oPtmHa0cUbG43JJ8p1oDu9pvZucJc0MjE7GlX/vmr/HntyvkGN9geL6cg==",
"dependencies": {
"Microsoft.AspNetCore.Components": "9.0.7"
"Microsoft.AspNetCore.Components": "9.0.10"
}
},
"Microsoft.AspNetCore.Components.Web": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "fP+WmahEXWgCTgL/aRo/y75v1nni8E8WfbpkbWOeMBk2UdQORqQbFPIkttu8JPYVACDfVYgEDKIDtVqEY9Akkg==",
"resolved": "9.0.10",
"contentHash": "1yay2fD17JGdSx/U1eeke8ONd0xuJJgpYVk0OKpOaomULRPAP/XTk4IUb4JNpoVhKEoM25y7R/RSXO2So7YTBA==",
"dependencies": {
"Microsoft.AspNetCore.Components": "9.0.7",
"Microsoft.AspNetCore.Components.Forms": "9.0.7",
"Microsoft.Extensions.DependencyInjection": "9.0.7",
"Microsoft.Extensions.Primitives": "9.0.7",
"Microsoft.JSInterop": "9.0.7"
"Microsoft.AspNetCore.Components": "9.0.10",
"Microsoft.AspNetCore.Components.Forms": "9.0.10",
"Microsoft.Extensions.DependencyInjection": "9.0.10",
"Microsoft.Extensions.Primitives": "9.0.10",
"Microsoft.JSInterop": "9.0.10"
}
},
"Microsoft.AspNetCore.Metadata": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "bM2x5yps2P6eXqFkR5ztKX7QRGGqJ4Vy5PxVdR7ADjYPNmMhrD57r8d9H++hpljk9sdjKI3Sppd7NZyA721nEA=="
"resolved": "9.0.10",
"contentHash": "JY5XyecFnIvCMZrtUaI2IrZY/SYidTqTN7H+tXmXxdGlvRGGnf2uUKH47MJu9poJ/raK4SWK5uZQwhd21T2WFw=="
},
"Microsoft.Extensions.DependencyInjection": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "i05AYA91vgq0as84ROVCyltD2gnxaba/f1Qw2rG7mUsS0gv8cPTr1Gm7jPQHq7JTr4MJoQUcanLVs16tIOUJaQ==",
"resolved": "9.0.10",
"contentHash": "iEtXCkNd5XhjNJAOb/wO4IhDRdLIE2CsPxZggZQWJ/q2+sa8dmEPC393nnsiqdH8/4KV8Xn25IzgKPR1UEQ0og==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.7"
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.10"
}
},
"Microsoft.Extensions.DependencyInjection.Abstractions": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "iPK1FxbGFr2Xb+4Y+dTYI8Gupu9pOi8I3JPuPsrogUmEhe2hzZ9LpCmolMEBhVDo2ikcSr7G5zYiwaapHSQTew=="
"resolved": "9.0.10",
"contentHash": "r9waLiOPe9ZF1PvzUT+RDoHvpMmY8MW+lb4lqjYGObwKpnyPMLI3odVvlmshwuZcdoHynsGWOrCPA0hxZ63lIA=="
},
"Microsoft.Extensions.FileProviders.Abstractions": {
"type": "Transitive",
"resolved": "9.0.8",
"contentHash": "4zZbQ4w+hCMm9J+z5NOj3giIPT2MhZxx05HX/MGuAmDBbjOuXlYIIRN+t4V6OLxy5nXZIcXO+dQMB/OWubuDkw==",
"resolved": "9.0.11",
"contentHash": "YEPsXWcoNde6J6W/MMjIuNQMPkKTL4NS0AJ1rsAt48+GuJYoZU+Mi4T8PwyzYGDLxhUsH3Wa32DlbKtDkzT40A==",
"dependencies": {
"Microsoft.Extensions.Primitives": "9.0.8"
"Microsoft.Extensions.Primitives": "9.0.11"
}
},
"Microsoft.Extensions.Localization": {
@@ -176,30 +176,30 @@
},
"Microsoft.Extensions.Logging.Abstractions": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "sMM6NEAdUTE/elJ2wqjOi0iBWqZmSyaTByLF9e8XHv6DRJFFnOe0N+s8Uc6C91E4SboQCfLswaBIZ+9ZXA98AA==",
"resolved": "9.0.10",
"contentHash": "MFUPv/nN1rAQ19w43smm6bbf0JDYN/1HEPHoiMYY50pvDMFpglzWAuoTavByDmZq7UuhjaxwrET3joU69ZHoHQ==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.7"
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.10"
}
},
"Microsoft.Extensions.Options": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "trJnF6cRWgR5uMmHpGoHmM1wOVFdIYlELlkO9zX+RfieK0321Y55zrcs4AaEymKup7dxgEN/uJU25CAcMNQRXw==",
"resolved": "9.0.10",
"contentHash": "zMNABt8eBv0B0XrWjFy9nZNgddavaOeq3ZdaD5IlHhRH65MrU7HM+Hd8GjWE3e2VDGFPZFfSAc6XVXC17f9fOA==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.7",
"Microsoft.Extensions.Primitives": "9.0.7"
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.10",
"Microsoft.Extensions.Primitives": "9.0.10"
}
},
"Microsoft.Extensions.Primitives": {
"type": "Transitive",
"resolved": "9.0.8",
"contentHash": "tizSIOEsIgSNSSh+hKeUVPK7xmTIjR8s+mJWOu1KXV3htvNQiPMFRMO17OdI1y/4ZApdBVk49u/08QGC9yvLug=="
"resolved": "9.0.11",
"contentHash": "rtUNSIhbQTv8iSBTFvtg2b/ZUkoqC9qAH9DdC2hr+xPpoZrxiCITci9UR/ELUGUGnGUrF8Xye+tGVRhCxE+4LA=="
},
"Microsoft.JSInterop": {
"type": "Transitive",
"resolved": "9.0.7",
"contentHash": "+FFcgE9nFf/M/8sSJPzKnGFkALO5Q3mCdljpsxe/ZFRt6bqMcImv8d74HgMamOauhmVlC7MU9GmnbblF9CpNlQ=="
"resolved": "9.0.10",
"contentHash": "+Zxxwp8rspdxq6uIkaEtqWW/vljDr2tLiLuhPUYV0+CzeuFpuwcKJ95iz6L9xbakxqjZN3WjmJBtqWZfB+zC5A=="
},
"ZXing.Net": {
"type": "Transitive",
@@ -210,6 +210,6 @@
"type": "Project"
}
},
"net9.0/osx-arm64": {}
"net9.0/win-x64": {}
}
}

View File

@@ -1,4 +1,4 @@
# v0.9.52, build 227 (2025-09-xx xx:xx UTC)
# v0.9.52, build 227 (2025-10-24 06:00 UTC)
- Added a feature so that matching results from data sources (local data sources as well as external ones via the ERI interface) are now also displayed at the end of a chat. All sources that come directly from the AI (like web searches) appear first, followed by those that come from the data sources. This source display works regardless of whether the AI actually used these sources, so users always get all the relevant information.
- Added the ability to manage the preview feature visibility and enabled preview features by using enterprise IT configurations.
- Improved developer experience by detecting development environments and disabling update prompts in those environments.

View File

@@ -0,0 +1,12 @@
# v0.9.53, build 228 (2025-11-14 13:14 UTC)
- Added expert settings to the provider dialog to enable setting additional parameters. Also, additional parameters can be configured by configuration plugins for enterprise scenarios. Thanks to Peer (`peerschuett`) for this contribution.
- Added the ability to export AI responses from the chat into Microsoft Word files. Thank you, Sabrina (`Sabrina-devops`), for your first contribution.
- Added the ability to use documents as input for profile fields.
- Added the ability to distribute profiles via configuration plugins in enterprise environments. Thanks, Paul (`PaulKoudelka`), for your first contribution.
- Added the ability to preset an app-wide default profile using a configuration plugin in enterprise environments.
- Added support for the new OpenAI GPT 5.1 models.
- Improved profiles by removing their input limits.
- Improved the file reading component to correctly verify the Pandoc installation and open the installation dialog when needed.
- Upgraded dependencies.
- Upgraded to Rust v1.91.1.
- Upgraded to .NET v9.0.11.

View File

@@ -0,0 +1 @@
# v0.9.54, build 229 (2025-11-xx xx:xx UTC)

View File

@@ -1,11 +1,11 @@
0.9.51
2025-09-04 18:02:17 UTC
226
9.0.109 (commit 08d4728191)
9.0.8 (commit aae90fa090)
1.89.0 (commit 29483883e)
0.9.53
2025-11-14 13:14:31 UTC
228
9.0.112 (commit 49aa03442a)
9.0.11 (commit fa7cdded37)
1.91.1 (commit ed61e7d7e)
8.12.0
1.8.1
ce243913ed4, release
osx-arm64
bac0b49dce9, release
win-x64
137.0.7215.0

runtime/Cargo.lock generated
View File

@@ -2599,7 +2599,7 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "mindwork-ai-studio"
version = "0.9.51"
version = "0.9.53"
dependencies = [
"aes",
"arboard",

View File

@@ -1,6 +1,6 @@
[package]
name = "mindwork-ai-studio"
version = "0.9.51"
version = "0.9.53"
edition = "2021"
description = "MindWork AI Studio"
authors = ["Thorsten Sommer"]

View File

@@ -498,6 +498,13 @@ pub struct SelectFileOptions {
filter: Option<FileTypeFilter>,
}
#[derive(Clone, Deserialize)]
pub struct SaveFileOptions {
title: String,
previous_file: Option<PreviousFile>,
filter: Option<FileTypeFilter>,
}
#[derive(Serialize)]
pub struct DirectorySelectionResponse {
user_cancelled: bool,
@@ -554,6 +561,55 @@ pub fn select_file(_token: APIToken, payload: Json<SelectFileOptions>) -> Json<F
}
}
#[post("/save/file", data = "<payload>")]
pub fn save_file(_token: APIToken, payload: Json<SaveFileOptions>) -> Json<FileSaveResponse> {
// Create a new file dialog builder:
let file_dialog = FileDialogBuilder::new();
// Set the title of the file dialog:
let file_dialog = file_dialog.set_title(&payload.title);
// Set the file type filter if provided:
let file_dialog = match &payload.filter {
Some(filter) => {
file_dialog.add_filter(&filter.filter_name, &filter.filter_extensions.iter().map(|s| s.as_str()).collect::<Vec<&str>>())
},
None => file_dialog,
};
// Set the previous file path if provided:
let file_dialog = match &payload.previous_file {
Some(previous) => {
let previous_path = previous.file_path.as_str();
file_dialog.set_directory(previous_path)
},
None => file_dialog,
};
// Display the save file dialog and let the user choose the destination:
let file_path = file_dialog.save_file();
match file_path {
Some(path) => {
info!("User selected file for writing operation: {path:?}");
Json(FileSaveResponse {
user_cancelled: false,
save_file_path: path.to_string_lossy().to_string(),
})
},
None => {
info!("User cancelled file selection.");
Json(FileSaveResponse {
user_cancelled: true,
save_file_path: String::from(""),
})
},
}
}
#[derive(Clone, Deserialize)]
pub struct PreviousFile {
file_path: String,
@@ -564,6 +620,11 @@ pub struct FileSelectionResponse {
user_cancelled: bool,
selected_file_path: String,
}
#[derive(Serialize)]
pub struct FileSaveResponse {
user_cancelled: bool,
save_file_path: String,
}
fn set_pdfium_path(path_resolver: PathResolver) {
let pdfium_relative_source_path = String::from("resources/libraries/");

View File

@@ -73,6 +73,7 @@ pub fn start_runtime_api() {
crate::app_window::install_update,
crate::app_window::select_directory,
crate::app_window::select_file,
crate::app_window::save_file,
crate::secret::get_secret,
crate::secret::store_secret,
crate::secret::delete_secret,

View File

@@ -6,7 +6,7 @@
},
"package": {
"productName": "MindWork AI Studio",
"version": "0.9.51"
"version": "0.9.53"
},
"tauri": {
"allowlist": {