Added HasModelLoadingCapability to all providers (#737)
Some checks are pending
Build and Release / Determine run mode (push) Waiting to run
Build and Release / Read metadata (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-apple-darwin, osx-arm64, macos-latest, aarch64-apple-darwin, dmg,updater, dmg) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-pc-windows-msvc.exe, win-arm64, windows-latest, aarch64-pc-windows-msvc, nsis,updater, nsis) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-unknown-linux-gnu, linux-arm64, ubuntu-22.04-arm, aarch64-unknown-linux-gnu, appimage,deb,updater, appimage,deb) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-apple-darwin, osx-x64, macos-latest, x86_64-apple-darwin, dmg,updater, dmg) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-pc-windows-msvc.exe, win-x64, windows-latest, x86_64-pc-windows-msvc, nsis,updater, nsis) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-unknown-linux-gnu, linux-x64, ubuntu-22.04, x86_64-unknown-linux-gnu, appimage,deb,updater, appimage,deb) (push) Blocked by required conditions
Build and Release / Prepare & create release (push) Blocked by required conditions
Build and Release / Publish release (push) Blocked by required conditions

This commit is contained in:
Thorsten Sommer 2026-04-16 11:24:22 +02:00 committed by GitHub
parent 9d6d3842b5
commit 247c1b66b9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
19 changed files with 71 additions and 0 deletions

View File

@@ -174,6 +174,9 @@ public sealed class ContentText : IContent
        return false;
    }

    if (!provider.HasModelLoadingCapability)
        return true;

    IReadOnlyList<Model> loadedModels;
    try
    {
@@ -203,6 +206,11 @@ public sealed class ContentText : IContent
    var availableModels = loadedModels.Where(model => !string.IsNullOrWhiteSpace(model.Id)).ToList();
    if (availableModels.Count == 0)
    {
        var emptyModelsMessage = string.Format(
            TB("We could load models from '{0}', but the provider did not return any usable text models."),
            provider.InstanceName);

        await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, emptyModelsMessage));
        LOGGER.LogWarning("Skipping AI request because there are no models available from '{ProviderInstanceName}' (provider={ProviderType}).", provider.InstanceName, provider.Provider);
        return false;
    }

View File

@@ -18,6 +18,9 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "AlibabaCloud";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -14,10 +14,15 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "
    #region Implementation of IProvider

    /// <inheritdoc />
    public override string Id => LLMProviders.ANTHROPIC.ToName();

    /// <inheritdoc />
    public override string InstanceName { get; set; } = "Anthropic";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -90,6 +90,9 @@ public abstract class BaseProvider : IProvider, ISecretId
    /// <inheritdoc />
    public string AdditionalJsonApiParameters { get; init; } = string.Empty;

    /// <inheritdoc />
    public abstract bool HasModelLoadingCapability { get; }

    /// <inheritdoc />
    public abstract IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, CancellationToken token = default);

View File

@@ -18,6 +18,9 @@ public sealed class ProviderDeepSeek() : BaseProvider(LLMProviders.DEEP_SEEK, "h
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "DeepSeek";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -18,6 +18,9 @@ public class ProviderFireworks() : BaseProvider(LLMProviders.FIREWORKS, "https:/
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "Fireworks.ai";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => false;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -18,6 +18,9 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "GWDG SAIA";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -20,6 +20,9 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "Google Gemini";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -18,6 +18,9 @@ public class ProviderGroq() : BaseProvider(LLMProviders.GROQ, "https://api.groq.
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "Groq";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -20,6 +20,9 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "Helmholtz Blablador";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -23,6 +23,9 @@ public sealed class ProviderHuggingFace : BaseProvider
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "HuggingFace";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => false;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -29,6 +29,12 @@ public interface IProvider
    /// </summary>
    public string AdditionalJsonApiParameters { get; }

    /// <summary>
    /// Whether this provider instance can load available models from the backend/API.
    /// This capability may differ by provider type, host, or modality.
    /// </summary>
    public bool HasModelLoadingCapability { get; }

    /// <summary>
    /// Starts a chat completion stream.
    /// </summary>

View File

@@ -12,10 +12,15 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http
    #region Implementation of IProvider

    /// <inheritdoc />
    public override string Id => LLMProviders.MISTRAL.ToName();

    /// <inheritdoc />
    public override string InstanceName { get; set; } = "Mistral";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -18,6 +18,9 @@ public class NoProvider : IProvider
    /// <inheritdoc />
    public string AdditionalJsonApiParameters { get; init; } = string.Empty;

    /// <inheritdoc />
    public bool HasModelLoadingCapability => false;

    public Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult(ModelLoadResult.FromModels([]));
    public Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult(ModelLoadResult.FromModels([]));

View File

@@ -23,6 +23,9 @@ public sealed class ProviderOpenAI() : BaseProvider(LLMProviders.OPEN_AI, "https
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "OpenAI";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -22,6 +22,9 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "OpenRouter";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -27,6 +27,9 @@ public sealed class ProviderPerplexity() : BaseProvider(LLMProviders.PERPLEXITY,
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "Perplexity";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -16,10 +16,15 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
    #region Implementation of IProvider

    /// <inheritdoc />
    public override string Id => LLMProviders.SELF_HOSTED.ToName();

    /// <inheritdoc />
    public override string InstanceName { get; set; } = "Self-hosted";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => host is Host.OLLAMA or Host.LM_STUDIO or Host.VLLM;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {

View File

@@ -18,6 +18,9 @@ public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "xAI";

    /// <inheritdoc />
    public override bool HasModelLoadingCapability => true;

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {