diff --git a/app/MindWork AI Studio/Assistants/I18N/allTexts.lua b/app/MindWork AI Studio/Assistants/I18N/allTexts.lua
index 124fc826..4908fbb6 100644
--- a/app/MindWork AI Studio/Assistants/I18N/allTexts.lua
+++ b/app/MindWork AI Studio/Assistants/I18N/allTexts.lua
@@ -2101,6 +2101,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T14695
-- Add Embedding
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T1738753945"] = "Add Embedding"
+-- Uses the provider-configured model
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T1760715963"] = "Uses the provider-configured model"
+
-- Are you sure you want to delete the embedding provider '{0}'?
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T1825371968"] = "Are you sure you want to delete the embedding provider '{0}'?"
@@ -2164,6 +2167,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELPROVIDERS::T162847
-- Description
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELPROVIDERS::T1725856265"] = "Description"
+-- Uses the provider-configured model
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELPROVIDERS::T1760715963"] = "Uses the provider-configured model"
+
-- Add Provider
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELPROVIDERS::T1806589097"] = "Add Provider"
@@ -2206,9 +2212,6 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELPROVIDERS::T291173
-- Configured LLM Providers
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELPROVIDERS::T3019870540"] = "Configured LLM Providers"
--- as selected by provider
-UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELPROVIDERS::T3082210376"] = "as selected by provider"
-
-- Edit
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELPROVIDERS::T3267849393"] = "Edit"
@@ -2266,6 +2269,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T14
-- Add transcription provider
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T1645238629"] = "Add transcription provider"
+-- Uses the provider-configured model
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T1760715963"] = "Uses the provider-configured model"
+
-- Add Transcription Provider
UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T2066315685"] = "Add Transcription Provider"
@@ -3415,6 +3421,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T3361153305"] = "Show Expert
-- Show available models
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T3763891899"] = "Show available models"
+-- This host uses the model configured at the provider level. No model selection is available.
+UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T3783329915"] = "This host uses the model configured at the provider level. No model selection is available."
+
-- Currently, we cannot query the models for the selected provider and/or host. Therefore, please enter the model name manually.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::PROVIDERDIALOG::T4116737656"] = "Currently, we cannot query the models for the selected provider and/or host. Therefore, please enter the model name manually."
@@ -4639,6 +4648,9 @@ UI_TEXT_CONTENT["AISTUDIO::DIALOGS::TRANSCRIPTIONPROVIDERDIALOG::T2842060373"] =
-- Please enter a transcription model name.
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::TRANSCRIPTIONPROVIDERDIALOG::T3703662664"] = "Please enter a transcription model name."
+-- This host uses the model configured at the provider level. No model selection is available.
+UI_TEXT_CONTENT["AISTUDIO::DIALOGS::TRANSCRIPTIONPROVIDERDIALOG::T3783329915"] = "This host uses the model configured at the provider level. No model selection is available."
+
-- Model selection
UI_TEXT_CONTENT["AISTUDIO::DIALOGS::TRANSCRIPTIONPROVIDERDIALOG::T416738168"] = "Model selection"
diff --git a/app/MindWork AI Studio/Components/Settings/SettingsPanelEmbeddings.razor b/app/MindWork AI Studio/Components/Settings/SettingsPanelEmbeddings.razor
index 4ffc743f..874bc3c9 100644
--- a/app/MindWork AI Studio/Components/Settings/SettingsPanelEmbeddings.razor
+++ b/app/MindWork AI Studio/Components/Settings/SettingsPanelEmbeddings.razor
@@ -35,7 +35,7 @@
@context.Num
@context.Name
@context.UsedLLMProvider.ToName()
- @GetEmbeddingProviderModelName(context)
+ @this.GetEmbeddingProviderModelName(context)
diff --git a/app/MindWork AI Studio/Components/Settings/SettingsPanelEmbeddings.razor.cs b/app/MindWork AI Studio/Components/Settings/SettingsPanelEmbeddings.razor.cs
index 50ebeb13..94878987 100644
--- a/app/MindWork AI Studio/Components/Settings/SettingsPanelEmbeddings.razor.cs
+++ b/app/MindWork AI Studio/Components/Settings/SettingsPanelEmbeddings.razor.cs
@@ -15,8 +15,12 @@ public partial class SettingsPanelEmbeddings : SettingsPanelBase
    [Parameter]
    public EventCallback<List<EmbeddingProvider>> AvailableEmbeddingProvidersChanged { get; set; }
-    private static string GetEmbeddingProviderModelName(EmbeddingProvider provider)
+    private string GetEmbeddingProviderModelName(EmbeddingProvider provider)
    {
+        // For system models, return localized text:
+        if (provider.Model.IsSystemModel)
+            return T("Uses the provider-configured model");
+
        const int MAX_LENGTH = 36;
        var modelName = provider.Model.ToString();
        return modelName.Length > MAX_LENGTH ? "[...] " + modelName[^Math.Min(MAX_LENGTH, modelName.Length)..] : modelName;
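
All three settings panels now route their model column through the same pattern: a localized hint for system-managed models, otherwise the model name shortened to its last 36 characters. Below is a minimal, self-contained sketch of just the truncation part; the class and method names are illustrative and not part of AI Studio's API.

```csharp
using System;

public static class ModelNameDisplay
{
    private const int MAX_LENGTH = 36;

    // Mirrors the rule used by the Get*ProviderModelName helpers above:
    // short names pass through unchanged, long names keep only their tail,
    // prefixed with "[...] " to mark the cut.
    public static string Shorten(string modelName) =>
        modelName.Length > MAX_LENGTH
            ? "[...] " + modelName[^Math.Min(MAX_LENGTH, modelName.Length)..]
            : modelName;
}
```
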
diff --git a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor
index f9567086..3d359408 100644
--- a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor
+++ b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor
@@ -1,6 +1,5 @@
@using AIStudio.Provider
@using AIStudio.Settings
-@using AIStudio.Provider.SelfHosted
@inherits SettingsPanelBase
@@ -29,20 +28,7 @@
@context.Num
@context.InstanceName
@context.UsedLLMProvider.ToName()
-
- @if (context.UsedLLMProvider is not LLMProviders.SELF_HOSTED)
- {
- @GetLLMProviderModelName(context)
- }
- else if (context.UsedLLMProvider is LLMProviders.SELF_HOSTED && context.Host is not Host.LLAMA_CPP)
- {
- @GetLLMProviderModelName(context)
- }
- else
- {
- @T("as selected by provider")
- }
-
+ @this.GetLLMProviderModelName(context)
@if (context.IsEnterpriseConfiguration)
diff --git a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor.cs b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor.cs
index 035543dc..2272959d 100644
--- a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor.cs
+++ b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor.cs
@@ -134,8 +134,12 @@ public partial class SettingsPanelProviders : SettingsPanelBase
await this.MessageBus.SendMessage(this, Event.CONFIGURATION_CHANGED);
}
-    private static string GetLLMProviderModelName(AIStudio.Settings.Provider provider)
+    private string GetLLMProviderModelName(AIStudio.Settings.Provider provider)
    {
+        // For system models, return localized text:
+        if (provider.Model.IsSystemModel)
+            return T("Uses the provider-configured model");
+
        const int MAX_LENGTH = 36;
        var modelName = provider.Model.ToString();
        return modelName.Length > MAX_LENGTH ? "[...] " + modelName[^Math.Min(MAX_LENGTH, modelName.Length)..] : modelName;
diff --git a/app/MindWork AI Studio/Components/Settings/SettingsPanelTranscription.razor b/app/MindWork AI Studio/Components/Settings/SettingsPanelTranscription.razor
index 82421e94..0405d6cd 100644
--- a/app/MindWork AI Studio/Components/Settings/SettingsPanelTranscription.razor
+++ b/app/MindWork AI Studio/Components/Settings/SettingsPanelTranscription.razor
@@ -32,7 +32,7 @@
@context.Num
@context.Name
@context.UsedLLMProvider.ToName()
- @GetTranscriptionProviderModelName(context)
+ @this.GetTranscriptionProviderModelName(context)
diff --git a/app/MindWork AI Studio/Components/Settings/SettingsPanelTranscription.razor.cs b/app/MindWork AI Studio/Components/Settings/SettingsPanelTranscription.razor.cs
index d564d5cd..243200a3 100644
--- a/app/MindWork AI Studio/Components/Settings/SettingsPanelTranscription.razor.cs
+++ b/app/MindWork AI Studio/Components/Settings/SettingsPanelTranscription.razor.cs
@@ -15,8 +15,12 @@ public partial class SettingsPanelTranscription : SettingsPanelBase
    [Parameter]
    public EventCallback<List<TranscriptionProvider>> AvailableTranscriptionProvidersChanged { get; set; }
-    private static string GetTranscriptionProviderModelName(TranscriptionProvider provider)
+    private string GetTranscriptionProviderModelName(TranscriptionProvider provider)
    {
+        // For system models, return localized text:
+        if (provider.Model.IsSystemModel)
+            return T("Uses the provider-configured model");
+
        const int MAX_LENGTH = 36;
        var modelName = provider.Model.ToString();
        return modelName.Length > MAX_LENGTH ? "[...] " + modelName[^Math.Min(MAX_LENGTH, modelName.Length)..] : modelName;
diff --git a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor
index 58ff5e5b..3ed23cba 100644
--- a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor
+++ b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor
@@ -71,58 +71,69 @@
@* ReSharper restore Asp.Entity *@
}
-
-
- @if (this.DataLLMProvider.IsLLMModelProvidedManually())
- {
-
- @T("Show available models")
-
-
- }
- else
- {
-
- @T("Load models")
-
- @if(this.availableModels.Count is 0)
+ @if (!this.DataLLMProvider.IsLLMModelSelectionHidden(this.DataHost))
+ {
+
+
+ @if (this.DataLLMProvider.IsLLMModelProvidedManually())
{
-
- @T("No models loaded or available.")
-
+
+ @T("Show available models")
+
+
}
else
{
-
- @foreach (var model in this.availableModels)
- {
-
- @model
-
- }
-
+
+ @T("Load models")
+
+ @if(this.availableModels.Count is 0)
+ {
+
+ @T("No models loaded or available.")
+
+ }
+ else
+ {
+
+ @foreach (var model in this.availableModels)
+ {
+
+ @model
+
+ }
+
+ }
}
+
+ @if (!string.IsNullOrWhiteSpace(this.dataLoadingModelsIssue))
+ {
+
+ @this.dataLoadingModelsIssue
+
}
-
- @if (!string.IsNullOrWhiteSpace(this.dataLoadingModelsIssue))
- {
-
- @this.dataLoadingModelsIssue
-
- }
-
+
+ }
+ else
+ {
+
+
+ @T("This host uses the model configured at the provider level. No model selection is available.")
+
+
+ }
@* ReSharper disable once CSharpWarnings::CS8974 *@
new Model(this.dataManuallyModel, null),
- _ => this.DataModel
- },
-
+ Model = model,
IsSelfHosted = this.DataLLMProvider is LLMProviders.SELF_HOSTED,
IsEnterpriseConfiguration = false,
Hostname = cleanedHostname.EndsWith('/') ? cleanedHostname[..^1] : cleanedHostname,
diff --git a/app/MindWork AI Studio/Dialogs/TranscriptionProviderDialog.razor b/app/MindWork AI Studio/Dialogs/TranscriptionProviderDialog.razor
index e8461b87..fc4d62e1 100644
--- a/app/MindWork AI Studio/Dialogs/TranscriptionProviderDialog.razor
+++ b/app/MindWork AI Studio/Dialogs/TranscriptionProviderDialog.razor
@@ -57,57 +57,68 @@
}
-
-
- @if (this.DataLLMProvider.IsTranscriptionModelProvidedManually(this.DataHost))
- {
-
- }
- else
- {
-
- @T("Load")
-
- @if(this.availableModels.Count is 0)
+ @if (!this.DataLLMProvider.IsTranscriptionModelSelectionHidden(this.DataHost))
+ {
+
+
+ @if (this.DataLLMProvider.IsTranscriptionModelProvidedManually(this.DataHost))
{
-
- @T("No models loaded or available.")
-
+
}
else
{
-
- @foreach (var model in this.availableModels)
- {
-
- @model
-
- }
-
+
+ @T("Load")
+
+ @if(this.availableModels.Count is 0)
+ {
+
+ @T("No models loaded or available.")
+
+ }
+ else
+ {
+
+ @foreach (var model in this.availableModels)
+ {
+
+ @model
+
+ }
+
+ }
}
+
+ @if (!string.IsNullOrWhiteSpace(this.dataLoadingModelsIssue))
+ {
+
+ @this.dataLoadingModelsIssue
+
}
-
- @if (!string.IsNullOrWhiteSpace(this.dataLoadingModelsIssue))
- {
-
- @this.dataLoadingModelsIssue
-
- }
-
+
+ }
+ else
+ {
+
+
+ @T("This host uses the model configured at the provider level. No model selection is available.")
+
+
+ }
@* ReSharper disable once CSharpWarnings::CS8974 *@
false,
};
+    /// <summary>
+    /// Determines if the model selection should be completely hidden for LLM providers.
+    /// This is the case when the host does not support model selection (e.g., llama.cpp).
+    /// </summary>
+    /// <param name="provider">The provider.</param>
+    /// <param name="host">The host for self-hosted providers.</param>
+    /// <returns>True if model selection should be hidden; otherwise, false.</returns>
+ public static bool IsLLMModelSelectionHidden(this LLMProviders provider, Host host) => provider switch
+ {
+ LLMProviders.SELF_HOSTED => host is Host.LLAMA_CPP,
+ _ => false,
+ };
+
+    /// <summary>
+    /// Determines if the model selection should be completely hidden for transcription providers.
+    /// This is the case when the host does not support model selection (e.g., whisper.cpp).
+    /// </summary>
+    /// <param name="provider">The provider.</param>
+    /// <param name="host">The host for self-hosted providers.</param>
+    /// <returns>True if model selection should be hidden; otherwise, false.</returns>
+ public static bool IsTranscriptionModelSelectionHidden(this LLMProviders provider, Host host) => provider switch
+ {
+ LLMProviders.SELF_HOSTED => host is Host.WHISPER_CPP,
+ _ => false,
+ };
+
public static bool IsHostNeeded(this LLMProviders provider) => provider switch
{
LLMProviders.SELF_HOSTED => true,
@@ -391,13 +417,13 @@ public static class LLMProvidersExtensions
{
case Host.NONE:
case Host.LLAMA_CPP:
+ case Host.WHISPER_CPP:
default:
return false;
case Host.OLLAMA:
case Host.LM_STUDIO:
case Host.VLLM:
- case Host.WHISPER_CPP:
return true;
}
}
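
The two new extension methods mirror each other: llama.cpp and whisper.cpp serve whatever model they were started with, so the dialogs hide the model selection for those hosts, while hosts that expose a model list keep it. A usage sketch, assuming the `LLMProviders` and `Host` enums and the extension class from this diff are in scope; the local variable names are illustrative.

```csharp
var provider = LLMProviders.SELF_HOSTED;

// llama.cpp does not support model selection, so the LLM dialog hides it entirely:
var hideLlmSelection = provider.IsLLMModelSelectionHidden(Host.LLAMA_CPP);               // true

// whisper.cpp behaves the same way for transcription:
var hideTranscription = provider.IsTranscriptionModelSelectionHidden(Host.WHISPER_CPP);  // true

// Ollama, LM Studio, and vLLM still expose a model list, so the selection stays visible:
var hideForOllama = provider.IsLLMModelSelectionHidden(Host.OLLAMA);                     // false
```

In that hidden case, the settings panels render the localized "Uses the provider-configured model" text instead of a model name.
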
diff --git a/app/MindWork AI Studio/Provider/Model.cs b/app/MindWork AI Studio/Provider/Model.cs
index 4e582f97..0cd43395 100644
--- a/app/MindWork AI Studio/Provider/Model.cs
+++ b/app/MindWork AI Studio/Provider/Model.cs
@@ -9,6 +9,22 @@ namespace AIStudio.Provider;
/// The model's display name.
public readonly record struct Model(string Id, string? DisplayName)
{
+    /// <summary>
+    /// Special model ID used when the model is selected by the system/host
+    /// and cannot be changed by the user (e.g., llama.cpp, whisper.cpp).
+    /// </summary>
+    private const string SYSTEM_MODEL_ID = "::system::";
+
+    /// <summary>
+    /// Creates a system-configured model placeholder.
+    /// </summary>
+    public static readonly Model SYSTEM_MODEL = new(SYSTEM_MODEL_ID, null);
+
+    /// <summary>
+    /// Checks if this model is the system-configured placeholder.
+    /// </summary>
+ public bool IsSystemModel => this == SYSTEM_MODEL;
+
private static string TB(string fallbackEN) => I18N.I.T(fallbackEN, typeof(Model).Namespace, nameof(Model));
#region Overrides of ValueType
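
`SYSTEM_MODEL` acts as a sentinel value: because `Model` is a record struct, `IsSystemModel` is a plain value-equality check against the `::system::` placeholder. A short sketch of the expected behavior, assuming the `Model` record from this diff is in scope; the second instance is illustrative.

```csharp
using System;

var system  = Model.SYSTEM_MODEL;
var regular = new Model("whisper-large-v3", "Whisper Large v3");

Console.WriteLine(system.IsSystemModel);   // True  -- equals the "::system::" placeholder
Console.WriteLine(regular.IsSystemModel);  // False -- any real model ID differs from it
```
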
diff --git a/app/MindWork AI Studio/wwwroot/changelog/v26.1.2.md b/app/MindWork AI Studio/wwwroot/changelog/v26.1.2.md
index e4a5f874..bdf0faa9 100644
--- a/app/MindWork AI Studio/wwwroot/changelog/v26.1.2.md
+++ b/app/MindWork AI Studio/wwwroot/changelog/v26.1.2.md
@@ -2,8 +2,9 @@
- Added the option to hide specific assistants via configuration plugins. This is useful for enterprise environments.
- Improved error handling for model loading in provider dialogs (LLMs, embeddings, transcriptions).
- Improved the microphone handling (transcription preview) so that all sound effects and the voice recording are processed without interruption.
+- Improved the handling of self-hosted providers in the configuration dialogs (LLMs, embeddings, and transcriptions) when the host cannot provide a list of models.
- Fixed a logging bug that prevented log events from being recorded in some cases.
-- Fixed a bug that allowed adding a provider without selecting a model.
+- Fixed a bug that allowed adding a provider (LLM, embedding, or transcription) without selecting a model.
- Fixed a bug with local transcription providers by handling errors correctly when the local provider is unavailable.
- Fixed a bug with local transcription providers by correctly handling empty model IDs.
- Fixed a bug affecting the transcription preview: previously, when you stopped music or other media, recorded or dictated text, and then tried to resume playback, the media wouldn’t resume as expected.
\ No newline at end of file