Improved error handling for model loading (#732)
Some checks are pending
Build and Release / Determine run mode (push) Waiting to run
Build and Release / Read metadata (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-apple-darwin, osx-arm64, macos-latest, aarch64-apple-darwin, dmg,updater, dmg) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-pc-windows-msvc.exe, win-arm64, windows-latest, aarch64-pc-windows-msvc, nsis,updater, nsis) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-unknown-linux-gnu, linux-arm64, ubuntu-22.04-arm, aarch64-unknown-linux-gnu, appimage,deb,updater, appimage,deb) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-apple-darwin, osx-x64, macos-latest, x86_64-apple-darwin, dmg,updater, dmg) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-pc-windows-msvc.exe, win-x64, windows-latest, x86_64-pc-windows-msvc, nsis,updater, nsis) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-unknown-linux-gnu, linux-x64, ubuntu-22.04, x86_64-unknown-linux-gnu, appimage,deb,updater, appimage,deb) (push) Blocked by required conditions
Build and Release / Prepare & create release (push) Blocked by required conditions
Build and Release / Publish release (push) Blocked by required conditions

This commit is contained in:
Thorsten Sommer 2026-04-14 13:39:11 +02:00 committed by GitHub
parent d494fe4bc7
commit da62814b2f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
29 changed files with 606 additions and 557 deletions

View File

@ -6205,6 +6205,21 @@ UI_TEXT_CONTENT["AISTUDIO::PROVIDER::LLMPROVIDERSEXTENSIONS::T3424652889"] = "Un
-- no model selected
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODEL::T2234274832"] = "no model selected"
-- We could not load models from '{0}'. The account or API key does not have the required permissions.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T1143085203"] = "We could not load models from '{0}'. The account or API key does not have the required permissions."
-- We could not load models from '{0}'. The API key is probably missing, invalid, or expired.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T2041046579"] = "We could not load models from '{0}'. The API key is probably missing, invalid, or expired."
-- We could not load models from '{0}' because the provider is currently unavailable or could not be reached.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T2115688703"] = "We could not load models from '{0}' because the provider is currently unavailable or could not be reached."
-- We could not load models from '{0}' because the provider returned an unexpected response.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T2186844789"] = "We could not load models from '{0}' because the provider returned an unexpected response."
-- We could not load models from '{0}' due to an unknown error.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T3907712809"] = "We could not load models from '{0}' due to an unknown error."
-- Model as configured by whisper.cpp
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::SELFHOSTED::PROVIDERSELFHOSTED::T3313940770"] = "Model as configured by whisper.cpp"

View File

@ -174,10 +174,21 @@ public sealed class ContentText : IContent
return false;
}
IEnumerable<Model> loadedModels;
IReadOnlyList<Model> loadedModels;
try
{
loadedModels = await provider.GetTextModels(token: token);
var modelLoadResult = await provider.GetTextModels(token: token);
if (!modelLoadResult.Success)
{
var userMessage = modelLoadResult.FailureReason.ToUserMessage(provider.InstanceName);
if (!string.IsNullOrWhiteSpace(userMessage))
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.CloudOff, userMessage));
LOGGER.LogWarning("Skipping selected model availability check for '{ProviderInstanceName}' (provider={ProviderType}) because loading the model list failed with reason {FailureReason}.", provider.InstanceName, provider.Provider, modelLoadResult.FailureReason);
return false;
}
loadedModels = modelLoadResult.Models;
}
catch (OperationCanceledException)
{

View File

@ -285,10 +285,12 @@ public partial class EmbeddingProviderDialog : MSGComponentBase, ISecretId
try
{
var models = await provider.GetEmbeddingModels(this.dataAPIKey);
var result = await provider.GetEmbeddingModels(this.dataAPIKey);
if (!result.Success)
this.dataLoadingModelsIssue = result.FailureReason.ToUserMessage(provider.InstanceName);
// Order descending by ID means that the newest models probably come first:
var orderedModels = models.OrderByDescending(n => n.Id);
var orderedModels = result.Models.OrderByDescending(n => n.Id);
this.availableModels.Clear();
this.availableModels.AddRange(orderedModels);

View File

@ -312,10 +312,12 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
try
{
var models = await provider.GetTextModels(this.dataAPIKey);
var result = await provider.GetTextModels(this.dataAPIKey);
if (!result.Success)
this.dataLoadingModelsIssue = result.FailureReason.ToUserMessage(provider.InstanceName);
// Order descending by ID means that the newest models probably come first:
var orderedModels = models.OrderByDescending(n => n.Id);
var orderedModels = result.Models.OrderByDescending(n => n.Id);
this.availableModels.Clear();
this.availableModels.AddRange(orderedModels);

View File

@ -300,10 +300,12 @@ public partial class TranscriptionProviderDialog : MSGComponentBase, ISecretId
try
{
var models = await provider.GetTranscriptionModels(this.dataAPIKey);
var result = await provider.GetTranscriptionModels(this.dataAPIKey);
if (!result.Success)
this.dataLoadingModelsIssue = result.FailureReason.ToUserMessage(provider.InstanceName);
// Order descending by ID means that the newest models probably come first:
var orderedModels = models.OrderByDescending(n => n.Id);
var orderedModels = result.Models.OrderByDescending(n => n.Id);
this.availableModels.Clear();
this.availableModels.AddRange(orderedModels);

View File

@ -6207,6 +6207,21 @@ UI_TEXT_CONTENT["AISTUDIO::PROVIDER::LLMPROVIDERSEXTENSIONS::T3424652889"] = "Un
-- no model selected
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODEL::T2234274832"] = "Kein Modell ausgewählt"
-- We could not load models from '{0}'. The account or API key does not have the required permissions.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T1143085203"] = "Wir konnten keine Modelle von '{0}' laden. Das Konto oder der API-Schlüssel verfügt nicht über die erforderlichen Berechtigungen."
-- We could not load models from '{0}'. The API key is probably missing, invalid, or expired.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T2041046579"] = "Wir konnten keine Modelle von '{0}' laden. Der API-Schlüssel fehlt wahrscheinlich, ist ungültig oder abgelaufen."
-- We could not load models from '{0}' because the provider is currently unavailable or could not be reached.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T2115688703"] = "Wir konnten keine Modelle von '{0}' laden, da der Anbieter derzeit nicht verfügbar oder nicht erreichbar ist."
-- We could not load models from '{0}' because the provider returned an unexpected response.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T2186844789"] = "Wir konnten keine Modelle von '{0}' laden, da der Anbieter eine unerwartete Antwort zurückgegeben hat."
-- We could not load models from '{0}' due to an unknown error.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T3907712809"] = "Wir konnten keine Modelle von '{0}' laden, da ein unbekannter Fehler aufgetreten ist."
-- Model as configured by whisper.cpp
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::SELFHOSTED::PROVIDERSELFHOSTED::T3313940770"] = "Modell wie in whisper.cpp konfiguriert"

View File

@ -6207,6 +6207,21 @@ UI_TEXT_CONTENT["AISTUDIO::PROVIDER::LLMPROVIDERSEXTENSIONS::T3424652889"] = "Un
-- no model selected
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODEL::T2234274832"] = "no model selected"
-- We could not load models from '{0}'. The account or API key does not have the required permissions.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T1143085203"] = "We could not load models from '{0}'. The account or API key does not have the required permissions."
-- We could not load models from '{0}'. The API key is probably missing, invalid, or expired.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T2041046579"] = "We could not load models from '{0}'. The API key is probably missing, invalid, or expired."
-- We could not load models from '{0}' because the provider is currently unavailable or could not be reached.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T2115688703"] = "We could not load models from '{0}' because the provider is currently unavailable or could not be reached."
-- We could not load models from '{0}' because the provider returned an unexpected response.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T2186844789"] = "We could not load models from '{0}' because the provider returned an unexpected response."
-- We could not load models from '{0}' due to an unknown error.
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODELLOADFAILUREREASONEXTENSIONS::T3907712809"] = "We could not load models from '{0}' due to an unknown error."
-- Model as configured by whisper.cpp
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::SELFHOSTED::PROVIDERSELFHOSTED::T3313940770"] = "Model as configured by whisper.cpp"

View File

@ -1,5 +1,4 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Runtime.CompilerServices;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
@ -71,7 +70,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var additionalModels = new[]
{
@ -100,17 +99,21 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
new Model("qwen2.5-vl-3b-instruct", "Qwen2.5-VL 3b"),
};
return this.LoadModels(["q"], SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
var result = await this.LoadModels(["q"], SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return result with
{
Models = [..result.Models.Concat(additionalModels).OrderBy(x => x.Id)]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var additionalModels = new[]
@ -118,45 +121,33 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
new Model("text-embedding-v3", "text-embedding-v3"),
};
return this.LoadModels(["text-embedding-"], SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
var result = await this.LoadModels(["text-embedding-"], SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
return result with
{
Models = [..result.Models.Concat(additionalModels).OrderBy(x => x.Id)]
};
}
#region Overrides of BaseProvider
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion
#endregion
private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
private Task<ModelLoadResult> LoadModels(string[] prefixes, SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
return modelResponse.Data.Where(model => prefixes.Any(prefix => model.Id.StartsWith(prefix, StringComparison.InvariantCulture)));
return this.LoadModelsResponse<ModelsResponse>(
storeType,
"models",
modelResponse => modelResponse.Data.Where(model => prefixes.Any(prefix => model.Id.StartsWith(prefix, StringComparison.InvariantCulture))),
token,
apiKeyProvisional);
}
}

View File

@ -1,4 +1,3 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
@ -124,7 +123,7 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var additionalModels = new[]
{
@ -136,59 +135,52 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "
new Model("claude-3-opus-latest", "Claude 3 Opus (Latest)"),
};
return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
var result = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return result with
{
Models = [..result.Models.Concat(additionalModels).OrderBy(x => x.Id)]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
private Task<ModelLoadResult> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
return this.LoadModelsResponse<ModelsResponse>(
storeType,
"models?limit=100",
modelResponse => modelResponse.Data,
token,
apiKeyProvisional,
failureReasonSelector: (response, _) => response.StatusCode switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "models?limit=100");
// Set the authorization header:
request.Headers.Add("x-api-key", secretKey);
// Set the Anthropic version:
request.Headers.Add("anthropic-version", "2023-06-01");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(JSON_SERIALIZER_OPTIONS, token);
return modelResponse.Data;
System.Net.HttpStatusCode.Unauthorized => ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY,
System.Net.HttpStatusCode.Forbidden => ModelLoadFailureReason.AUTHENTICATION_OR_PERMISSION_ERROR,
_ => ModelLoadFailureReason.PROVIDER_UNAVAILABLE,
},
requestConfigurator: (request, secretKey) =>
{
request.Headers.Add("x-api-key", secretKey);
request.Headers.Add("anthropic-version", "2023-06-01");
},
jsonSerializerOptions: JSON_SERIALIZER_OPTIONS);
}
}

View File

@ -29,7 +29,7 @@ public abstract class BaseProvider : IProvider, ISecretId
/// <summary>
/// The HTTP client to use it for all requests.
/// </summary>
protected readonly HttpClient httpClient = new();
protected readonly HttpClient HttpClient = new();
/// <summary>
/// The logger to use.
@ -73,7 +73,7 @@ public abstract class BaseProvider : IProvider, ISecretId
this.Provider = provider;
// Set the base URL:
this.httpClient.BaseAddress = new(url);
this.HttpClient.BaseAddress = new(url);
}
#region Handling of IProvider, which all providers must implement
@ -103,16 +103,16 @@ public abstract class BaseProvider : IProvider, ISecretId
public abstract Task<IReadOnlyList<IReadOnlyList<float>>> EmbedTextAsync(Model embeddingModel, SettingsManager settingsManager, CancellationToken token = default, params List<string> texts);
/// <inheritdoc />
public abstract Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default);
public abstract Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default);
/// <inheritdoc />
public abstract Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default);
public abstract Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default);
/// <inheritdoc />
public abstract Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default);
public abstract Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default);
/// <inheritdoc />
public abstract Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default);
public abstract Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default);
#endregion
@ -129,6 +129,71 @@ public abstract class BaseProvider : IProvider, ISecretId
#endregion
/// <summary>
/// Convenience wrapper: packages the given models into a successful <see cref="ModelLoadResult"/>.
/// </summary>
protected static ModelLoadResult SuccessfulModelLoadResult(IEnumerable<Model> models) => ModelLoadResult.FromModels(models);

/// <summary>
/// Convenience wrapper: creates a failed <see cref="ModelLoadResult"/> with the given reason and
/// optional technical details (e.g. HTTP status and response body) for logging/diagnostics.
/// </summary>
protected static ModelLoadResult FailedModelLoadResult(ModelLoadFailureReason failureReason, string? technicalDetails = null) => ModelLoadResult.Failure(failureReason, technicalDetails);
/// <summary>
/// Resolves the API key to use for loading the model list.
/// </summary>
/// <param name="storeType">The secret store to query when no provisional key is given.</param>
/// <param name="apiKeyProvisional">A key entered by the user but not yet stored; takes precedence when not null.</param>
/// <param name="isTryingSecret">Forwarded to the secret store as <c>isTrying</c> — presumably suppresses error handling for a speculative lookup; TODO confirm against RUST_SERVICE.</param>
/// <returns>The decrypted key, or null when neither a provisional key nor a stored secret is available.</returns>
protected async Task<string?> GetModelLoadingSecretKey(SecretStoreType storeType, string? apiKeyProvisional = null, bool isTryingSecret = false) => apiKeyProvisional switch
{
    // A provisional key (e.g. from an open settings dialog) always wins over the stored secret:
    not null => apiKeyProvisional,
    _ => await RUST_SERVICE.GetAPIKey(this, storeType, isTrying: isTryingSecret) switch
    {
        // Only a successful lookup yields a secret; it must still be decrypted before use:
        { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
        _ => null,
    }
};
/// <summary>
/// Maps an unsuccessful HTTP response to a default <see cref="ModelLoadFailureReason"/>.
/// Providers can override this mapping per request via the failureReasonSelector parameter
/// of <c>LoadModelsResponse</c>.
/// </summary>
protected static ModelLoadFailureReason GetDefaultModelLoadFailureReason(HttpResponseMessage response) => response.StatusCode switch
{
    // 401: the key itself was rejected (missing, invalid, or expired):
    HttpStatusCode.Unauthorized => ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY,

    // 403: the key was accepted but lacks the necessary permissions:
    HttpStatusCode.Forbidden => ModelLoadFailureReason.AUTHENTICATION_OR_PERMISSION_ERROR,

    // Everything else (5xx, timeouts surfaced as status codes, etc.) is treated as "provider unavailable":
    _ => ModelLoadFailureReason.PROVIDER_UNAVAILABLE,
};
/// <summary>
/// Shared implementation for loading a provider's model list: resolves the API key, issues a GET
/// request against <paramref name="requestPath"/>, maps HTTP failures to a
/// <see cref="ModelLoadFailureReason"/>, and deserializes the body into <typeparamref name="TResponse"/>.
/// </summary>
/// <typeparam name="TResponse">The provider-specific JSON response type for the model list endpoint.</typeparam>
/// <param name="storeType">Which secret store holds the API key (LLM vs. embedding provider).</param>
/// <param name="requestPath">The relative endpoint path, resolved against the provider's base address.</param>
/// <param name="modelFactory">Extracts the models from the deserialized response (e.g. filtering by prefix).</param>
/// <param name="token">Cancellation token for the HTTP request and body read.</param>
/// <param name="apiKeyProvisional">Optional not-yet-stored key that takes precedence over the secret store.</param>
/// <param name="failureReasonSelector">Optional override for mapping a non-success response (and its body) to a failure reason; defaults to <see cref="GetDefaultModelLoadFailureReason"/>.</param>
/// <param name="requestConfigurator">Optional hook to set provider-specific auth headers; when absent, a Bearer Authorization header is used.</param>
/// <param name="jsonSerializerOptions">Optional serializer options; defaults to JSON_SERIALIZER_OPTIONS.</param>
/// <param name="isTryingSecret">When true, a missing key does not short-circuit — the request is sent anyway (presumably for providers that work without a key); TODO confirm intended semantics.</param>
/// <returns>A successful result with the extracted models, or a failed result with reason and technical details.</returns>
protected async Task<ModelLoadResult> LoadModelsResponse<TResponse>(
    SecretStoreType storeType,
    string requestPath,
    Func<TResponse, IEnumerable<Model>> modelFactory,
    CancellationToken token,
    string? apiKeyProvisional = null,
    Func<HttpResponseMessage, string, ModelLoadFailureReason>? failureReasonSelector = null,
    Action<HttpRequestMessage, string>? requestConfigurator = null,
    JsonSerializerOptions? jsonSerializerOptions = null,
    bool isTryingSecret = false)
{
    var secretKey = await this.GetModelLoadingSecretKey(storeType, apiKeyProvisional, isTryingSecret);

    // Without a key we cannot authenticate — fail fast, unless the caller is only "trying":
    if (string.IsNullOrWhiteSpace(secretKey) && !isTryingSecret)
        return FailedModelLoadResult(ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY, "No API key available for model loading.");

    using var request = new HttpRequestMessage(HttpMethod.Get, requestPath);

    // Custom auth (e.g. Anthropic's x-api-key header) wins over the default Bearer scheme.
    // NOTE(review): in the isTryingSecret path the configurator may receive an empty string — confirm configurators tolerate that.
    if (requestConfigurator is not null)
        requestConfigurator(request, secretKey ?? string.Empty);
    else if (!string.IsNullOrWhiteSpace(secretKey))
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);

    using var response = await this.HttpClient.SendAsync(request, token);

    // The body is read unconditionally: on failure it becomes part of the technical details:
    var responseBody = await response.Content.ReadAsStringAsync(token);
    if (!response.IsSuccessStatusCode)
    {
        var failureReason = failureReasonSelector?.Invoke(response, responseBody) ?? GetDefaultModelLoadFailureReason(response);
        return FailedModelLoadResult(failureReason, $"Status={(int)response.StatusCode} {response.ReasonPhrase}; Body='{responseBody}'");
    }

    try
    {
        var parsedResponse = JsonSerializer.Deserialize<TResponse>(responseBody, jsonSerializerOptions ?? JSON_SERIALIZER_OPTIONS);
        if (parsedResponse is null)
            return FailedModelLoadResult(ModelLoadFailureReason.INVALID_RESPONSE, "Model list response could not be deserialized.");

        return SuccessfulModelLoadResult(modelFactory(parsedResponse));
    }
    catch (Exception e)
    {
        // Malformed JSON (or a throwing modelFactory) is reported as an invalid response rather than crashing the caller:
        return FailedModelLoadResult(ModelLoadFailureReason.INVALID_RESPONSE, e.Message);
    }
}
/// <summary>
/// Sends a request and handles rate limiting by exponential backoff.
/// </summary>
@ -155,7 +220,7 @@ public abstract class BaseProvider : IProvider, ISecretId
// Please notice: We do not dispose the response here. The caller is responsible
// for disposing the response object. This is important because the response
// object is used to read the stream.
var nextResponse = await this.httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, token);
var nextResponse = await this.HttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, token);
if (nextResponse.IsSuccessStatusCode)
{
response = nextResponse;
@ -696,7 +761,7 @@ public abstract class BaseProvider : IProvider, ISecretId
break;
}
using var response = await this.httpClient.SendAsync(request, token);
using var response = await this.HttpClient.SendAsync(request, token);
var responseBody = response.Content.ReadAsStringAsync(token).Result;
if (!response.IsSuccessStatusCode)
@ -766,7 +831,7 @@ public abstract class BaseProvider : IProvider, ISecretId
// Set the content:
request.Content = new StringContent(embeddingRequest, Encoding.UTF8, "application/json");
using var response = await this.httpClient.SendAsync(request, token);
using var response = await this.HttpClient.SendAsync(request, token);
var responseBody = response.Content.ReadAsStringAsync(token).Result;
if (!response.IsSuccessStatusCode)

View File

@ -1,4 +1,3 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using AIStudio.Chat;
@ -70,54 +69,38 @@ public sealed class ProviderDeepSeek() : BaseProvider(LLMProviders.DEEP_SEEK, "h
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
private Task<ModelLoadResult> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
return modelResponse.Data;
return this.LoadModelsResponse<ModelsResponse>(
storeType,
"models",
modelResponse => modelResponse.Data,
token,
apiKeyProvisional);
}
}

View File

@ -71,33 +71,32 @@ public class ProviderFireworks() : BaseProvider(LLMProviders.FIREWORKS, "https:/
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
// Source: https://docs.fireworks.ai/api-reference/audio-transcriptions#param-model
return Task.FromResult<IEnumerable<Model>>(
new List<Model>
{
new("whisper-v3", "Whisper v3"),
// new("whisper-v3-turbo", "Whisper v3 Turbo"), // does not work
});
return Task.FromResult(ModelLoadResult.FromModels(
[
new Model("whisper-v3", "Whisper v3"),
// new("whisper-v3-turbo", "Whisper v3 Turbo"), // does not work
]));
}
#endregion

View File

@ -1,5 +1,4 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Runtime.CompilerServices;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
@ -71,61 +70,55 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch
}
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return models.Where(model => !model.Id.StartsWith("e5-mistral-7b-instruct", StringComparison.InvariantCultureIgnoreCase));
var result = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return result with
{
Models = [..result.Models.Where(model => !model.Id.StartsWith("e5-mistral-7b-instruct", StringComparison.InvariantCultureIgnoreCase))]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var models = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
return models.Where(model => model.Id.StartsWith("e5-", StringComparison.InvariantCultureIgnoreCase));
var result = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
return result with
{
Models = [..result.Models.Where(model => model.Id.StartsWith("e5-", StringComparison.InvariantCultureIgnoreCase))]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
// Source: https://docs.hpc.gwdg.de/services/saia/index.html#voice-to-text
return Task.FromResult<IEnumerable<Model>>(
new List<Model>
{
new("whisper-large-v2", "Whisper v2 Large"),
});
return Task.FromResult(ModelLoadResult.FromModels(
[
new Model("whisper-large-v2", "Whisper v2 Large"),
]));
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
private async Task<ModelLoadResult> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
var result = await this.LoadModelsResponse<ModelsResponse>(
storeType,
"models",
modelResponse => modelResponse.Data,
token,
apiKeyProvisional);
if (secretKey is null)
return [];
if (!result.Success)
LOGGER.LogWarning("Failed to load models for provider {ProviderId}. FailureReason: {FailureReason}. TechnicalDetails: {TechnicalDetails}", this.Id, result.FailureReason, result.TechnicalDetails);
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
return modelResponse.Data;
return result;
}
}

View File

@ -1,4 +1,3 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
@ -107,7 +106,7 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
// Set the content:
request.Content = new StringContent(embeddingRequest, Encoding.UTF8, "application/json");
using var response = await this.httpClient.SendAsync(request, token);
using var response = await this.HttpClient.SendAsync(request, token);
var responseBody = await response.Content.ReadAsStringAsync(token);
if (!response.IsSuccessStatusCode)
@ -139,80 +138,64 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
}
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return models.Where(model =>
model.Id.StartsWith("gemini-", StringComparison.OrdinalIgnoreCase) &&
!this.IsEmbeddingModel(model.Id))
.Select(this.WithDisplayNameFallback);
var result = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return result with
{
Models =
[
..result.Models.Where(model =>
model.Id.StartsWith("gemini-", StringComparison.OrdinalIgnoreCase) &&
!this.IsEmbeddingModel(model.Id))
.Select(this.WithDisplayNameFallback)
]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
public override async Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var models = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
return models.Where(model => this.IsEmbeddingModel(model.Id))
.Select(this.WithDisplayNameFallback);
var result = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
return result with
{
Models =
[
..result.Models.Where(model => this.IsEmbeddingModel(model.Id))
.Select(this.WithDisplayNameFallback)
]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion
private async Task<IReadOnlyList<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
private Task<ModelLoadResult> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (string.IsNullOrWhiteSpace(secretKey))
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
{
LOGGER.LogError("Failed to load models with status code {ResponseStatusCode} and body: '{ResponseBody}'.", response.StatusCode, await response.Content.ReadAsStringAsync(token));
return [];
}
try
{
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
if (modelResponse == default || modelResponse.Data.Count is 0)
{
LOGGER.LogError("Google model list response did not contain a valid data array.");
return [];
}
return modelResponse.Data
return this.LoadModelsResponse<ModelsResponse>(
storeType,
"models",
modelResponse => modelResponse.Data
.Where(model => !string.IsNullOrWhiteSpace(model.Id))
.Select(model => new Model(this.NormalizeModelId(model.Id), model.DisplayName))
.ToArray();
}
catch (Exception e)
{
LOGGER.LogError("Failed to parse Google model list response: '{Message}'.", e.Message);
return [];
}
.Select(model => new Model(this.NormalizeModelId(model.Id), model.DisplayName)),
token,
apiKeyProvisional,
failureReasonSelector: (response, _) => response.StatusCode switch
{
System.Net.HttpStatusCode.Forbidden => ModelLoadFailureReason.AUTHENTICATION_OR_PERMISSION_ERROR,
System.Net.HttpStatusCode.Unauthorized => ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY,
_ => ModelLoadFailureReason.PROVIDER_UNAVAILABLE,
});
}
private bool IsEmbeddingModel(string modelId)

View File

@ -1,4 +1,3 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using AIStudio.Chat;
@ -74,57 +73,41 @@ public class ProviderGroq() : BaseProvider(LLMProviders.GROQ, "https://api.groq.
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult<IEnumerable<Model>>([]);
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
private Task<ModelLoadResult> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
return modelResponse.Data.Where(n =>
!n.Id.StartsWith("whisper-", StringComparison.OrdinalIgnoreCase) &&
!n.Id.StartsWith("distil-", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("-tts", StringComparison.OrdinalIgnoreCase));
return this.LoadModelsResponse<ModelsResponse>(
storeType,
"models",
modelResponse => modelResponse.Data.Where(n =>
!n.Id.StartsWith("whisper-", StringComparison.OrdinalIgnoreCase) &&
!n.Id.StartsWith("distil-", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("-tts", StringComparison.OrdinalIgnoreCase)),
token,
apiKeyProvisional);
}
}

View File

@ -1,5 +1,6 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text.Json;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
@ -71,60 +72,81 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
}
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return models.Where(model => !model.Id.StartsWith("text-", StringComparison.InvariantCultureIgnoreCase) &&
!model.Id.StartsWith("alias-embedding", StringComparison.InvariantCultureIgnoreCase));
var result = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return result with
{
Models =
[
..result.Models.Where(model => !model.Id.StartsWith("text-", StringComparison.InvariantCultureIgnoreCase) &&
!model.Id.Contains("-embedding", StringComparison.InvariantCultureIgnoreCase)
)
]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var models = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
return models.Where(model =>
model.Id.StartsWith("alias-embedding", StringComparison.InvariantCultureIgnoreCase) ||
model.Id.StartsWith("text-", StringComparison.InvariantCultureIgnoreCase) ||
model.Id.Contains("gritlm", StringComparison.InvariantCultureIgnoreCase));
var result = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
return result with
{
Models =
[
..result.Models.Where(model =>
model.Id.Contains("-embedding", StringComparison.InvariantCultureIgnoreCase) ||
model.Id.StartsWith("text-", StringComparison.InvariantCultureIgnoreCase) ||
model.Id.Contains("gritlm", StringComparison.InvariantCultureIgnoreCase))
]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
private async Task<ModelLoadResult> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
var secretKey = await this.GetModelLoadingSecretKey(storeType, apiKeyProvisional);
if (string.IsNullOrWhiteSpace(secretKey))
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY, "No API key available for model loading.");
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
using var response = await this.HttpClient.SendAsync(request, token);
var body = await response.Content.ReadAsStringAsync(token);
if (!response.IsSuccessStatusCode)
return FailedModelLoadResult(GetDefaultModelLoadFailureReason(response), $"Status={(int)response.StatusCode} {response.ReasonPhrase}; Body='{body}'");
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
return modelResponse.Data;
try
{
var modelResponse = JsonSerializer.Deserialize<ModelsResponse>(body, JSON_SERIALIZER_OPTIONS);
return SuccessfulModelLoadResult(modelResponse.Data);
}
catch (JsonException e)
{
if (body.Contains("API key", StringComparison.InvariantCultureIgnoreCase))
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY, body);
LOGGER.LogError(e, "Unexpected error while parsing models from Helmholtz API response. Status Code: {StatusCode}. Reason: {ReasonPhrase}. Response Body: '{ResponseBody}'", response.StatusCode, response.ReasonPhrase, body);
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_RESPONSE, body);
}
catch (Exception e)
{
LOGGER.LogError(e, "Unexpected error while loading models from Helmholtz API. Status Code: {StatusCode}. Reason: {ReasonPhrase}", response.StatusCode, response.ReasonPhrase);
return FailedModelLoadResult(ModelLoadFailureReason.UNKNOWN, e.Message);
}
}
}

View File

@ -74,27 +74,27 @@ public sealed class ProviderHuggingFace : BaseProvider
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion

View File

@ -76,7 +76,7 @@ public interface IProvider
/// <param name="apiKeyProvisional">The provisional API key to use. Useful when the user is adding a new provider. When null, the stored API key is used.</param>
/// <param name="token">The cancellation token.</param>
/// <returns>The list of text models.</returns>
public Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default);
public Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default);
/// <summary>
/// Load all possible image models that can be used with this provider.
@ -84,7 +84,7 @@ public interface IProvider
/// <param name="apiKeyProvisional">The provisional API key to use. Useful when the user is adding a new provider. When null, the stored API key is used.</param>
/// <param name="token">The cancellation token.</param>
/// <returns>The list of image models.</returns>
public Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default);
public Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default);
/// <summary>
/// Load all possible embedding models that can be used with this provider.
@ -92,7 +92,7 @@ public interface IProvider
/// <param name="apiKeyProvisional">The provisional API key to use. Useful when the user is adding a new provider. When null, the stored API key is used.</param>
/// <param name="token">The cancellation token.</param>
/// <returns>The list of embedding models.</returns>
public Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default);
public Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default);
/// <summary>
/// Load all possible transcription models that can be used with this provider.
@ -100,5 +100,5 @@ public interface IProvider
/// <param name="apiKeyProvisional">The provisional API key to use. Useful when the user is adding a new provider. When null, the stored API key is used.</param>
/// <param name="token">The cancellation token.</param>
/// <returns>The list of transcription models.</returns>
public Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default);
public Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default);
}

View File

@ -1,4 +1,3 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using AIStudio.Chat;
@ -77,72 +76,62 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http
}
/// <inheritdoc />
public override async Task<IEnumerable<Provider.Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var modelResponse = await this.LoadModelList(SecretStoreType.LLM_PROVIDER, apiKeyProvisional, token);
if(modelResponse == default)
return [];
if(!modelResponse.Success)
return modelResponse;
return modelResponse.Data.Where(n =>
!n.Id.StartsWith("code", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("embed", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("moderation", StringComparison.OrdinalIgnoreCase))
.Select(n => new Provider.Model(n.Id, null));
return modelResponse with
{
Models =
[
..modelResponse.Models.Where(n =>
!n.Id.StartsWith("code", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("embed", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("moderation", StringComparison.OrdinalIgnoreCase))
]
};
}
/// <inheritdoc />
public override async Task<IEnumerable<Provider.Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var modelResponse = await this.LoadModelList(SecretStoreType.EMBEDDING_PROVIDER, apiKeyProvisional, token);
if(modelResponse == default)
return [];
if(!modelResponse.Success)
return modelResponse;
return modelResponse.Data.Where(n => n.Id.Contains("embed", StringComparison.InvariantCulture))
.Select(n => new Provider.Model(n.Id, null));
return modelResponse with
{
Models = [..modelResponse.Models.Where(n => n.Id.Contains("embed", StringComparison.InvariantCulture))]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Provider.Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Provider.Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Provider.Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
// Source: https://docs.mistral.ai/capabilities/audio_transcription
return Task.FromResult<IEnumerable<Provider.Model>>(
new List<Provider.Model>
{
new("voxtral-mini-latest", "Voxtral Mini Latest"),
});
return Task.FromResult(ModelLoadResult.FromModels(
[
new Provider.Model("voxtral-mini-latest", "Voxtral Mini Latest"),
]));
}
#endregion
private async Task<ModelsResponse> LoadModelList(SecretStoreType storeType, string? apiKeyProvisional, CancellationToken token)
private Task<ModelLoadResult> LoadModelList(SecretStoreType storeType, string? apiKeyProvisional, CancellationToken token)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return default;
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return default;
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
return modelResponse;
return this.LoadModelsResponse<ModelsResponse>(
storeType,
"models",
modelResponse => modelResponse.Data.Select(n => new Provider.Model(n.Id, null)),
token,
apiKeyProvisional);
}
}

View File

@ -0,0 +1,11 @@
namespace AIStudio.Provider;
/// <summary>
/// Describes why loading the model list from an LLM provider failed.
/// </summary>
public enum ModelLoadFailureReason
{
/// <summary>Model loading succeeded; no failure occurred.</summary>
NONE,
/// <summary>The API key is probably missing, invalid, or expired.</summary>
INVALID_OR_MISSING_API_KEY,
/// <summary>The account or API key does not have the required permissions.</summary>
AUTHENTICATION_OR_PERMISSION_ERROR,
/// <summary>The provider is currently unavailable or could not be reached.</summary>
PROVIDER_UNAVAILABLE,
/// <summary>The provider returned an unexpected or unparsable response.</summary>
INVALID_RESPONSE,
/// <summary>Model loading failed for an unknown reason.</summary>
UNKNOWN,
}

View File

@ -0,0 +1,19 @@
using AIStudio.Tools.PluginSystem;
namespace AIStudio.Provider;
/// <summary>
/// Provides extension methods to turn a <see cref="ModelLoadFailureReason"/> into a localized, user-facing message.
/// </summary>
public static class ModelLoadFailureReasonExtensions
{
    // Resolves the localized text for this component, using the given English text as fallback.
    private static string TB(string fallbackEN) => I18N.I.T(fallbackEN, typeof(ModelLoadFailureReasonExtensions).Namespace, nameof(ModelLoadFailureReasonExtensions));

    /// <summary>
    /// Converts the given failure reason into a localized message suitable for display to the user.
    /// </summary>
    /// <param name="failureReason">The reason why loading the model list failed.</param>
    /// <param name="providerName">The human-readable name of the affected provider; inserted into the message.</param>
    /// <returns>The localized user message, or an empty string when there is no failure to report.</returns>
    public static string ToUserMessage(this ModelLoadFailureReason failureReason, string providerName)
    {
        return failureReason switch
        {
            ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY => string.Format(TB("We could not load models from '{0}'. The API key is probably missing, invalid, or expired."), providerName),
            ModelLoadFailureReason.AUTHENTICATION_OR_PERMISSION_ERROR => string.Format(TB("We could not load models from '{0}'. The account or API key does not have the required permissions."), providerName),
            ModelLoadFailureReason.PROVIDER_UNAVAILABLE => string.Format(TB("We could not load models from '{0}' because the provider is currently unavailable or could not be reached."), providerName),
            ModelLoadFailureReason.INVALID_RESPONSE => string.Format(TB("We could not load models from '{0}' because the provider returned an unexpected response."), providerName),
            ModelLoadFailureReason.UNKNOWN => string.Format(TB("We could not load models from '{0}' due to an unknown error."), providerName),
            _ => string.Empty,
        };
    }
}

View File

@ -0,0 +1,19 @@
namespace AIStudio.Provider;
/// <summary>
/// Represents the outcome of loading the model list from a provider.
/// </summary>
/// <param name="Models">The models that were loaded; empty when loading failed.</param>
/// <param name="FailureReason">The reason why loading failed; NONE on success.</param>
/// <param name="TechnicalDetails">Optional technical details about the failure, e.g., for logging.</param>
public sealed record ModelLoadResult(
    IReadOnlyList<Model> Models,
    ModelLoadFailureReason FailureReason = ModelLoadFailureReason.NONE,
    string? TechnicalDetails = null)
{
    /// <summary>
    /// True when no failure reason was recorded.
    /// </summary>
    public bool Success => this.FailureReason is ModelLoadFailureReason.NONE;

    /// <summary>
    /// Creates a successful result containing the given models.
    /// </summary>
    public static ModelLoadResult FromModels(IEnumerable<Model> models) => new([..models]);

    /// <summary>
    /// Creates a failed result without any models.
    /// </summary>
    public static ModelLoadResult Failure(ModelLoadFailureReason failureReason, string? technicalDetails = null) => new([], failureReason, technicalDetails);
}

View File

@ -18,13 +18,13 @@ public class NoProvider : IProvider
/// <inheritdoc />
public string AdditionalJsonApiParameters { get; init; } = string.Empty;
public Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult<IEnumerable<Model>>([]);
public Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult(ModelLoadResult.FromModels([]));
public Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult<IEnumerable<Model>>([]);
public Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult(ModelLoadResult.FromModels([]));
public Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult<IEnumerable<Model>>([]);
public Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult(ModelLoadResult.FromModels([]));
public Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult<IEnumerable<Model>>([]);
public Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult(ModelLoadResult.FromModels([]));
public async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatChatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{

View File

@ -233,61 +233,57 @@ public sealed class ProviderOpenAI() : BaseProvider(LLMProviders.OPEN_AI, "https
}
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, ["chatgpt-", "gpt-", "o1-", "o3-", "o4-"], token, apiKeyProvisional);
return models.Where(model => !model.Id.Contains("image", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("realtime", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("audio", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("tts", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("transcribe", StringComparison.OrdinalIgnoreCase));
var result = await this.LoadModels(SecretStoreType.LLM_PROVIDER, ["chatgpt-", "gpt-", "o1-", "o3-", "o4-"], token, apiKeyProvisional);
return result with
{
Models =
[
..result.Models.Where(model => !model.Id.Contains("image", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("realtime", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("audio", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("tts", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("transcribe", StringComparison.OrdinalIgnoreCase))
]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels(SecretStoreType.IMAGE_PROVIDER, ["dall-e-", "gpt-image"], token, apiKeyProvisional);
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, ["text-embedding-"], token, apiKeyProvisional);
}
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var models = await this.LoadModels(SecretStoreType.TRANSCRIPTION_PROVIDER, ["whisper-", "gpt-"], token, apiKeyProvisional);
return models.Where(model => model.Id.StartsWith("whisper-", StringComparison.InvariantCultureIgnoreCase) ||
model.Id.Contains("-transcribe", StringComparison.InvariantCultureIgnoreCase));
var result = await this.LoadModels(SecretStoreType.TRANSCRIPTION_PROVIDER, ["whisper-", "gpt-"], token, apiKeyProvisional);
return result with
{
Models =
[
..result.Models.Where(model => model.Id.StartsWith("whisper-", StringComparison.InvariantCultureIgnoreCase) ||
model.Id.Contains("-transcribe", StringComparison.InvariantCultureIgnoreCase))
]
};
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
private Task<ModelLoadResult> LoadModels(SecretStoreType storeType, string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
return modelResponse.Data.Where(model => prefixes.Any(prefix => model.Id.StartsWith(prefix, StringComparison.InvariantCulture)));
return this.LoadModelsResponse<ModelsResponse>(
storeType,
"models",
modelResponse => modelResponse.Data.Where(model => prefixes.Any(prefix => model.Id.StartsWith(prefix, StringComparison.InvariantCulture))),
token,
apiKeyProvisional);
}
}

View File

@ -81,102 +81,70 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadEmbeddingModels(token, apiKeyProvisional);
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
private Task<ModelLoadResult> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
return this.LoadModelsResponse<OpenRouterModelsResponse>(
storeType,
"models",
modelResponse => modelResponse.Data
.Where(n =>
!n.Id.Contains("whisper", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("dall-e", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("tts", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("embedding", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("moderation", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("stable-diffusion", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("flux", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("midjourney", StringComparison.OrdinalIgnoreCase))
.Select(n => new Model(n.Id, n.Name)),
token,
apiKeyProvisional,
requestConfigurator: (request, secretKey) =>
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
// Set custom headers for project identification:
request.Headers.Add("HTTP-Referer", PROJECT_WEBSITE);
request.Headers.Add("X-Title", PROJECT_NAME);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<OpenRouterModelsResponse>(token);
// Filter out non-text models (image, audio, embedding models) and convert to Model
return modelResponse.Data
.Where(n =>
!n.Id.Contains("whisper", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("dall-e", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("tts", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("embedding", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("moderation", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("stable-diffusion", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("flux", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("midjourney", StringComparison.OrdinalIgnoreCase))
.Select(n => new Model(n.Id, n.Name));
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
request.Headers.Add("HTTP-Referer", PROJECT_WEBSITE);
request.Headers.Add("X-Title", PROJECT_NAME);
});
}
private async Task<IEnumerable<Model>> LoadEmbeddingModels(CancellationToken token, string? apiKeyProvisional = null)
private Task<ModelLoadResult> LoadEmbeddingModels(CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, SecretStoreType.EMBEDDING_PROVIDER) switch
return this.LoadModelsResponse<OpenRouterModelsResponse>(
SecretStoreType.EMBEDDING_PROVIDER,
"embeddings/models",
modelResponse => modelResponse.Data.Select(n => new Model(n.Id, n.Name)),
token,
apiKeyProvisional,
requestConfigurator: (request, secretKey) =>
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "embeddings/models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
// Set custom headers for project identification:
request.Headers.Add("HTTP-Referer", PROJECT_WEBSITE);
request.Headers.Add("X-Title", PROJECT_NAME);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<OpenRouterModelsResponse>(token);
// Convert all embedding models to Model
return modelResponse.Data.Select(n => new Model(n.Id, n.Name));
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
request.Headers.Add("HTTP-Referer", PROJECT_WEBSITE);
request.Headers.Add("X-Title", PROJECT_NAME);
});
}
}

View File

@ -77,30 +77,30 @@ public sealed class ProviderPerplexity() : BaseProvider(LLMProviders.PERPLEXITY,
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels();
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion
private Task<IEnumerable<Model>> LoadModels() => Task.FromResult<IEnumerable<Model>>(KNOWN_MODELS);
private Task<ModelLoadResult> LoadModels() => Task.FromResult(ModelLoadResult.FromModels(KNOWN_MODELS));
}

View File

@ -81,7 +81,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
return await this.PerformStandardTextEmbeddingRequest(requestedSecret, embeddingModel, host, token: token, texts: texts);
}
public override async Task<IEnumerable<Provider.Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
try
{
@ -90,7 +90,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
case Host.LLAMA_CPP:
// Right now, llama.cpp only supports one model.
// There is no API to list the model(s).
return [ new Provider.Model("as configured by llama.cpp", null) ];
return ModelLoadResult.FromModels([ new Provider.Model("as configured by llama.cpp", null) ]);
case Host.LM_STUDIO:
case Host.OLLAMA:
@ -98,22 +98,22 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
return await this.LoadModels( SecretStoreType.LLM_PROVIDER, ["embed"], [], token, apiKeyProvisional);
}
return [];
return ModelLoadResult.FromModels([]);
}
catch(Exception e)
{
LOGGER.LogError($"Failed to load text models from self-hosted provider: {e.Message}");
return [];
return ModelLoadResult.Failure(ModelLoadFailureReason.UNKNOWN, e.Message);
}
}
/// <inheritdoc />
public override Task<IEnumerable<Provider.Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Provider.Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
public override async Task<IEnumerable<Provider.Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
try
{
@ -125,69 +125,61 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
return await this.LoadModels( SecretStoreType.EMBEDDING_PROVIDER, [], ["embed"], token, apiKeyProvisional);
}
return [];
return ModelLoadResult.FromModels([]);
}
catch(Exception e)
{
LOGGER.LogError($"Failed to load text models from self-hosted provider: {e.Message}");
return [];
return ModelLoadResult.Failure(ModelLoadFailureReason.UNKNOWN, e.Message);
}
}
/// <inheritdoc />
public override async Task<IEnumerable<Provider.Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
try
{
switch (host)
{
case Host.WHISPER_CPP:
return new List<Provider.Model>
{
new("loaded-model", TB("Model as configured by whisper.cpp")),
};
return ModelLoadResult.FromModels(
[
new Provider.Model("loaded-model", TB("Model as configured by whisper.cpp")),
]);
case Host.OLLAMA:
case Host.VLLM:
return await this.LoadModels(SecretStoreType.TRANSCRIPTION_PROVIDER, [], [], token, apiKeyProvisional);
default:
return [];
return ModelLoadResult.FromModels([]);
}
}
catch (Exception e)
{
LOGGER.LogError($"Failed to load transcription models from self-hosted provider: {e.Message}");
return [];
return ModelLoadResult.Failure(ModelLoadFailureReason.UNKNOWN, e.Message);
}
}
#endregion
private async Task<IEnumerable<Provider.Model>> LoadModels(SecretStoreType storeType, string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null)
private async Task<ModelLoadResult> LoadModels(SecretStoreType storeType, string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType, isTrying: true) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
var secretKey = await this.GetModelLoadingSecretKey(storeType, apiKeyProvisional, true);
using var lmStudioRequest = new HttpRequestMessage(HttpMethod.Get, "models");
if(secretKey is not null)
lmStudioRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", apiKeyProvisional);
lmStudioRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var lmStudioResponse = await this.httpClient.SendAsync(lmStudioRequest, token);
using var lmStudioResponse = await this.HttpClient.SendAsync(lmStudioRequest, token);
if(!lmStudioResponse.IsSuccessStatusCode)
return [];
return FailedModelLoadResult(GetDefaultModelLoadFailureReason(lmStudioResponse), $"Status={(int)lmStudioResponse.StatusCode} {lmStudioResponse.ReasonPhrase}");
var lmStudioModelResponse = await lmStudioResponse.Content.ReadFromJsonAsync<ModelsResponse>(token);
return lmStudioModelResponse.Data.
return SuccessfulModelLoadResult(lmStudioModelResponse.Data.
Where(model => !ignorePhrases.Any(ignorePhrase => model.Id.Contains(ignorePhrase, StringComparison.InvariantCulture)) &&
filterPhrases.All( filter => model.Id.Contains(filter, StringComparison.InvariantCulture)))
.Select(n => new Provider.Model(n.Id, null));
.Select(n => new Provider.Model(n.Id, null)));
}
}

View File

@ -1,4 +1,3 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using AIStudio.Chat;
@ -71,67 +70,49 @@ public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai
}
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, ["grok-"], token, apiKeyProvisional);
return models.Where(n => !n.Id.Contains("-image", StringComparison.OrdinalIgnoreCase));
var result = await this.LoadModels(SecretStoreType.LLM_PROVIDER, ["grok-"], token, apiKeyProvisional);
return result with
{
Models = [..result.Models.Where(n => !n.Id.Contains("-image", StringComparison.OrdinalIgnoreCase))]
};
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult<IEnumerable<Model>>([]);
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult<IEnumerable<Model>>([]);
return Task.FromResult(ModelLoadResult.FromModels([]));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
return Task.FromResult(ModelLoadResult.FromModels([]));
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
private Task<ModelLoadResult> LoadModels(SecretStoreType storeType, string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
//
// The API does not return the alias model names, so we have to add them manually:
// Right now, the only alias to add is `grok-2-latest`.
//
return modelResponse.Data.Where(model => prefixes.Any(prefix => model.Id.StartsWith(prefix, StringComparison.InvariantCulture)))
.Concat([
new Model
{
Id = "grok-2-latest",
DisplayName = "Grok 2.0 (latest)",
}
]);
return this.LoadModelsResponse<ModelsResponse>(
storeType,
"models",
modelResponse => modelResponse.Data.Where(model => prefixes.Any(prefix => model.Id.StartsWith(prefix, StringComparison.InvariantCulture)))
.Concat([
new Model
{
Id = "grok-2-latest",
DisplayName = "Grok 2.0 (latest)",
}
]),
token,
apiKeyProvisional);
}
}

View File

@ -24,6 +24,7 @@
- Improved the logbook reliability by significantly reducing duplicate log entries.
- Improved file attachments in chats: configuration and project files such as `Dockerfile`, `Caddyfile`, `Makefile`, or `Jenkinsfile` are now included more reliably when you send them to the AI.
- Improved the validation of additional API parameters in the advanced provider settings to help catch formatting mistakes earlier.
- Improved the model checks and model list loading by showing clearer error messages when AI Studio cannot access a provider because the API key is missing, invalid, expired, or lacks the required permissions.
- Improved the app startup resilience by allowing AI Studio to continue without Qdrant if it fails to initialize.
- Improved the translation assistant by updating the system and user prompts.
- Improved OpenAI-compatible providers by refactoring their streaming request handling to be more consistent and reliable.