This commit is contained in:
Sabrina-devops 2026-05-09 23:51:38 +00:00 committed by GitHub
commit 3fb2d368be
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 230 additions and 60 deletions

View File

@ -212,6 +212,10 @@ CONFIG["SETTINGS"] = {}
-- Examples are: "CmdOrControl+Shift+D", "Alt+F9", "F8"
-- CONFIG["SETTINGS"]["DataApp.ShortcutVoiceRecording"] = "CmdOrControl+1"
-- Configure the HTTP timeout for requests to LLM providers, in seconds.
-- The default is 3600 (1 hour).
-- CONFIG["SETTINGS"]["DataApp.ProviderHttpTimeoutSeconds"] = 3600
-- Example chat templates for this configuration:
CONFIG["CHAT_TEMPLATES"] = {}

View File

@ -24,7 +24,10 @@ namespace AIStudio.Provider;
/// </summary>
public abstract class BaseProvider : IProvider, ISecretId
{
private const int DEFAULT_HTTP_TIMEOUT_SECONDS = 3600;
private static string TB(string fallbackEN) => I18N.I.T(fallbackEN, typeof(BaseProvider).Namespace, nameof(BaseProvider));
private static readonly SettingsManager SETTINGS_MANAGER = Program.SERVICE_PROVIDER.GetRequiredService<SettingsManager>();
/// <summary>
/// The HTTP client to use for all requests.
@ -74,6 +77,7 @@ public abstract class BaseProvider : IProvider, ISecretId
// Set the base URL:
this.HttpClient.BaseAddress = new(url);
this.HttpClient.Timeout = GetProviderHttpTimeout();
}
#region Handling of IProvider, which all providers must implement
@ -136,6 +140,58 @@ public abstract class BaseProvider : IProvider, ISecretId
protected static ModelLoadResult FailedModelLoadResult(ModelLoadFailureReason failureReason, string? technicalDetails = null) => ModelLoadResult.Failure(failureReason, technicalDetails);
/// <summary>
/// Determines whether the given exception (or any exception nested inside it)
/// represents a request timeout rather than a user-initiated cancellation.
/// </summary>
/// <param name="exception">The exception to inspect, including its inner-exception chain.</param>
/// <param name="token">The cancellation token of the request; when it was canceled, the exception is attributed to the user and never treated as a timeout.</param>
/// <returns>True when the exception chain indicates a timeout; false otherwise.</returns>
protected bool IsTimeoutException(Exception exception, CancellationToken token = default)
{
    // A canceled token means the user aborted the request — not a timeout:
    if (token.IsCancellationRequested)
        return false;

    return exception switch
    {
        TimeoutException => true,

        // HttpClient reports its timeout as a (Task/Operation)CanceledException; since the
        // token was not canceled (checked above), such a cancellation is treated as a timeout:
        OperationCanceledException => true,

        // Otherwise, walk the inner-exception chain recursively:
        _ => exception.InnerException is { } inner && this.IsTimeoutException(inner, token),
    };
}
/// <summary>
/// Notifies the user via the message bus that a request to this LLM provider timed out.
/// </summary>
/// <param name="action">A short description of what was being attempted when the timeout occurred; inserted into the error message.</param>
/// <returns>The task representing the asynchronous send operation.</returns>
protected Task SendTimeoutError(string action)
{
    var errorText = string.Format(
        TB("The request to the LLM provider '{0}' (type={1}) timed out after {2} while {3}. Please try again or check whether the provider is still responding."),
        this.InstanceName,
        this.Provider,
        GetProviderHttpTimeoutDescription(),
        action);

    return MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.HourglassTop, errorText));
}
/// <summary>
/// Reads the provider HTTP timeout from the app settings, falling back to the
/// built-in default (DEFAULT_HTTP_TIMEOUT_SECONDS) when the configured value
/// is zero or negative.
/// </summary>
/// <returns>The timeout to apply to HTTP requests against LLM providers.</returns>
private static TimeSpan GetProviderHttpTimeout()
{
    var configuredSeconds = SETTINGS_MANAGER.ConfigurationData.App.ProviderHttpTimeoutSeconds;
    var effectiveSeconds = configuredSeconds > 0 ? configuredSeconds : DEFAULT_HTTP_TIMEOUT_SECONDS;
    return TimeSpan.FromSeconds(effectiveSeconds);
}
/// <summary>
/// Renders the configured provider HTTP timeout as a human-readable English string,
/// picking the largest unit (hours, minutes, seconds) that divides the timeout evenly.
/// </summary>
/// <returns>A string such as "1 hour", "90 minutes", or "45 seconds".</returns>
private static string GetProviderHttpTimeoutDescription()
{
    // Produces "1 hour" vs. "2 hours" etc.; all units here pluralize with a plain "s":
    static string Pluralize(int amount, string unit) => amount == 1 ? $"1 {unit}" : $"{amount} {unit}s";

    var timeout = GetProviderHttpTimeout();

    // Whole number of hours (no leftover minutes):
    if (timeout.TotalHours >= 1 && timeout.TotalMinutes % 60 == 0)
        return Pluralize((int)timeout.TotalHours, "hour");

    // Whole number of minutes (no leftover seconds):
    if (timeout.TotalMinutes >= 1 && timeout.TotalSeconds % 60 == 0)
        return Pluralize((int)timeout.TotalMinutes, "minute");

    return Pluralize((int)timeout.TotalSeconds, "second");
}
protected async Task<string?> GetModelLoadingSecretKey(SecretStoreType storeType, string? apiKeyProvisional = null, bool isTryingSecret = false) => apiKeyProvisional switch
{
not null => apiKeyProvisional,
@ -175,25 +231,34 @@ public abstract class BaseProvider : IProvider, ISecretId
else if (!string.IsNullOrWhiteSpace(secretKey))
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.HttpClient.SendAsync(request, token);
var responseBody = await response.Content.ReadAsStringAsync(token);
if (!response.IsSuccessStatusCode)
{
var failureReason = failureReasonSelector?.Invoke(response, responseBody) ?? GetDefaultModelLoadFailureReason(response);
return FailedModelLoadResult(failureReason, $"Status={(int)response.StatusCode} {response.ReasonPhrase}; Body='{responseBody}'");
}
try
{
var parsedResponse = JsonSerializer.Deserialize<TResponse>(responseBody, jsonSerializerOptions ?? JSON_SERIALIZER_OPTIONS);
if (parsedResponse is null)
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_RESPONSE, "Model list response could not be deserialized.");
using var response = await this.HttpClient.SendAsync(request, token);
var responseBody = await response.Content.ReadAsStringAsync(token);
if (!response.IsSuccessStatusCode)
{
var failureReason = failureReasonSelector?.Invoke(response, responseBody) ?? GetDefaultModelLoadFailureReason(response);
return FailedModelLoadResult(failureReason, $"Status={(int)response.StatusCode} {response.ReasonPhrase}; Body='{responseBody}'");
}
return SuccessfulModelLoadResult(modelFactory(parsedResponse));
try
{
var parsedResponse = JsonSerializer.Deserialize<TResponse>(responseBody, jsonSerializerOptions ?? JSON_SERIALIZER_OPTIONS);
if (parsedResponse is null)
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_RESPONSE, "Model list response could not be deserialized.");
return SuccessfulModelLoadResult(modelFactory(parsedResponse));
}
catch (Exception e)
{
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_RESPONSE, e.Message);
}
}
catch (Exception e)
catch (Exception e) when (this.IsTimeoutException(e, token))
{
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_RESPONSE, e.Message);
await this.SendTimeoutError("loading the available models");
this.logger.LogError(e, "Timed out while loading models from provider '{ProviderInstanceName}' (provider={ProviderType}).", this.InstanceName, this.Provider);
return FailedModelLoadResult(ModelLoadFailureReason.PROVIDER_UNAVAILABLE, e.Message);
}
}
@ -223,7 +288,18 @@ public abstract class BaseProvider : IProvider, ISecretId
// Please notice: We do not dispose the response here. The caller is responsible
// for disposing the response object. This is important because the response
// object is used to read the stream.
var nextResponse = await this.HttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, token);
HttpResponseMessage nextResponse;
try
{
nextResponse = await this.HttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, token);
}
catch (Exception e) when (this.IsTimeoutException(e, token))
{
await this.SendTimeoutError("waiting for the chat response");
this.logger.LogError(e, "Timed out while sending a streaming request to provider '{ProviderInstanceName}' (provider={ProviderType}).", this.InstanceName, this.Provider);
return new HttpRateLimitedStreamResult(false, true, e.Message, response);
}
if (nextResponse.IsSuccessStatusCode)
{
response = nextResponse;
@ -341,8 +417,20 @@ public abstract class BaseProvider : IProvider, ISecretId
}
catch(Exception e)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Stream, string.Format(TB("Tried to communicate with the LLM provider '{0}'. There were some problems with the request. The provider message is: '{1}'"), this.InstanceName, e.Message)));
this.logger.LogError($"Failed to stream chat completion from {providerName} '{this.InstanceName}': {e.Message}");
if (token.IsCancellationRequested)
{
this.logger.LogWarning("The user canceled the chat completion request for {ProviderName} '{ProviderInstanceName}' before the response stream was opened.", providerName, this.InstanceName);
}
else if (this.IsTimeoutException(e, token))
{
await this.SendTimeoutError("opening the chat response stream");
this.logger.LogError(e, "Timed out while opening the chat completion stream from {ProviderName} '{ProviderInstanceName}'.", providerName, this.InstanceName);
}
else
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Stream, string.Format(TB("Tried to communicate with the LLM provider '{0}'. There were some problems with the request. The provider message is: '{1}'"), this.InstanceName, e.Message)));
this.logger.LogError($"Failed to stream chat completion from {providerName} '{this.InstanceName}': {e.Message}");
}
}
if (streamReader is null)
@ -383,8 +471,21 @@ public abstract class BaseProvider : IProvider, ISecretId
}
catch (Exception e)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Stream, string.Format(TB("Tried to stream the LLM provider '{0}' answer. Was not able to read the stream. The message is: '{1}'"), this.InstanceName, e.Message)));
this.logger.LogError($"Failed to read the stream from {providerName} '{this.InstanceName}': {e.Message}");
if (token.IsCancellationRequested)
{
this.logger.LogWarning("The user canceled the chat completion stream for {ProviderName} '{ProviderInstanceName}' while reading the next chunk.", providerName, this.InstanceName);
}
else if (this.IsTimeoutException(e, token))
{
await this.SendTimeoutError("reading the chat response stream");
this.logger.LogError(e, "Timed out while reading the chat stream from {ProviderName} '{ProviderInstanceName}'.", providerName, this.InstanceName);
}
else
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Stream, string.Format(TB("Tried to stream the LLM provider '{0}' answer. Was not able to read the stream. The message is: '{1}'"), this.InstanceName, e.Message)));
this.logger.LogError($"Failed to read the stream from {providerName} '{this.InstanceName}': {e.Message}");
}
break;
}
@ -505,8 +606,20 @@ public abstract class BaseProvider : IProvider, ISecretId
}
catch(Exception e)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Stream, string.Format(TB("Tried to communicate with the LLM provider '{0}'. There were some problems with the request. The provider message is: '{1}'"), this.InstanceName, e.Message)));
this.logger.LogError($"Failed to stream responses from {providerName} '{this.InstanceName}': {e.Message}");
if (token.IsCancellationRequested)
{
this.logger.LogWarning("The user canceled the responses request for {ProviderName} '{ProviderInstanceName}' before the response stream was opened.", providerName, this.InstanceName);
}
else if (this.IsTimeoutException(e, token))
{
await this.SendTimeoutError("opening the chat response stream");
this.logger.LogError(e, "Timed out while opening the responses stream from {ProviderName} '{ProviderInstanceName}'.", providerName, this.InstanceName);
}
else
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Stream, string.Format(TB("Tried to communicate with the LLM provider '{0}'. There were some problems with the request. The provider message is: '{1}'"), this.InstanceName, e.Message)));
this.logger.LogError($"Failed to stream responses from {providerName} '{this.InstanceName}': {e.Message}");
}
}
if (streamReader is null)
@ -547,8 +660,21 @@ public abstract class BaseProvider : IProvider, ISecretId
}
catch (Exception e)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Stream, string.Format(TB("Tried to stream the LLM provider '{0}' answer. Was not able to read the stream. The message is: '{1}'"), this.InstanceName, e.Message)));
this.logger.LogError($"Failed to read the stream from {providerName} '{this.InstanceName}': {e.Message}");
if (token.IsCancellationRequested)
{
this.logger.LogWarning("The user canceled the responses stream for {ProviderName} '{ProviderInstanceName}' while reading the next chunk.", providerName, this.InstanceName);
}
else if (this.IsTimeoutException(e, token))
{
await this.SendTimeoutError("reading the chat response stream");
this.logger.LogError(e, "Timed out while reading the responses stream from {ProviderName} '{ProviderInstanceName}'.", providerName, this.InstanceName);
}
else
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Stream, string.Format(TB("Tried to stream the LLM provider '{0}' answer. Was not able to read the stream. The message is: '{1}'"), this.InstanceName, e.Message)));
this.logger.LogError($"Failed to read the stream from {providerName} '{this.InstanceName}': {e.Message}");
}
break;
}
@ -784,6 +910,9 @@ public abstract class BaseProvider : IProvider, ISecretId
}
catch (Exception e)
{
if (this.IsTimeoutException(e, token))
await this.SendTimeoutError("transcribing audio");
this.logger.LogError("Failed to perform transcription request: '{Message}'.", e.Message);
return string.Empty;
}
@ -859,6 +988,9 @@ public abstract class BaseProvider : IProvider, ISecretId
}
catch (Exception e)
{
if (this.IsTimeoutException(e, token))
await this.SendTimeoutError("creating embeddings");
this.logger.LogError("Failed to perform embedding request: '{Message}'.", e.Message);
return [];
}

View File

@ -135,6 +135,9 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
}
catch (Exception e)
{
if (this.IsTimeoutException(e, token))
await this.SendTimeoutError("creating embeddings");
LOGGER.LogError("Failed to perform embedding request: '{Message}'.", e.Message);
return [];
}

View File

@ -125,31 +125,40 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
if (string.IsNullOrWhiteSpace(secretKey))
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY, "No API key available for model loading.");
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.HttpClient.SendAsync(request, token);
var body = await response.Content.ReadAsStringAsync(token);
if (!response.IsSuccessStatusCode)
return FailedModelLoadResult(GetDefaultModelLoadFailureReason(response), $"Status={(int)response.StatusCode} {response.ReasonPhrase}; Body='{body}'");
try
{
var modelResponse = JsonSerializer.Deserialize<ModelsResponse>(body, JSON_SERIALIZER_OPTIONS);
return SuccessfulModelLoadResult(modelResponse.Data);
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.HttpClient.SendAsync(request, token);
var body = await response.Content.ReadAsStringAsync(token);
if (!response.IsSuccessStatusCode)
return FailedModelLoadResult(GetDefaultModelLoadFailureReason(response), $"Status={(int)response.StatusCode} {response.ReasonPhrase}; Body='{body}'");
try
{
var modelResponse = JsonSerializer.Deserialize<ModelsResponse>(body, JSON_SERIALIZER_OPTIONS);
return SuccessfulModelLoadResult(modelResponse.Data);
}
catch (JsonException e)
{
if (body.Contains("API key", StringComparison.InvariantCultureIgnoreCase))
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY, body);
LOGGER.LogError(e, "Unexpected error while parsing models from Helmholtz API response. Status Code: {StatusCode}. Reason: {ReasonPhrase}. Response Body: '{ResponseBody}'", response.StatusCode, response.ReasonPhrase, body);
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_RESPONSE, body);
}
catch (Exception e)
{
LOGGER.LogError(e, "Unexpected error while loading models from Helmholtz API. Status Code: {StatusCode}. Reason: {ReasonPhrase}", response.StatusCode, response.ReasonPhrase);
return FailedModelLoadResult(ModelLoadFailureReason.UNKNOWN, e.Message);
}
}
catch (JsonException e)
catch (Exception e) when (this.IsTimeoutException(e, token))
{
if (body.Contains("API key", StringComparison.InvariantCultureIgnoreCase))
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_OR_MISSING_API_KEY, body);
LOGGER.LogError(e, "Unexpected error while parsing models from Helmholtz API response. Status Code: {StatusCode}. Reason: {ReasonPhrase}. Response Body: '{ResponseBody}'", response.StatusCode, response.ReasonPhrase, body);
return FailedModelLoadResult(ModelLoadFailureReason.INVALID_RESPONSE, body);
}
catch (Exception e)
{
LOGGER.LogError(e, "Unexpected error while loading models from Helmholtz API. Status Code: {StatusCode}. Reason: {ReasonPhrase}", response.StatusCode, response.ReasonPhrase);
return FailedModelLoadResult(ModelLoadFailureReason.UNKNOWN, e.Message);
await this.SendTimeoutError("loading the available models");
LOGGER.LogError(e, "Timed out while loading models from Helmholtz provider '{ProviderInstanceName}'.", this.InstanceName);
return FailedModelLoadResult(ModelLoadFailureReason.PROVIDER_UNAVAILABLE, e.Message);
}
}
}
}

View File

@ -172,19 +172,28 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
private async Task<ModelLoadResult> LoadModels(SecretStoreType storeType, string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = await this.GetModelLoadingSecretKey(storeType, apiKeyProvisional, true);
using var lmStudioRequest = new HttpRequestMessage(HttpMethod.Get, "models");
if(secretKey is not null)
lmStudioRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var lmStudioResponse = await this.HttpClient.SendAsync(lmStudioRequest, token);
if(!lmStudioResponse.IsSuccessStatusCode)
return FailedModelLoadResult(GetDefaultModelLoadFailureReason(lmStudioResponse), $"Status={(int)lmStudioResponse.StatusCode} {lmStudioResponse.ReasonPhrase}");
var lmStudioModelResponse = await lmStudioResponse.Content.ReadFromJsonAsync<ModelsResponse>(token);
return SuccessfulModelLoadResult(lmStudioModelResponse.Data.
Where(model => !ignorePhrases.Any(ignorePhrase => model.Id.Contains(ignorePhrase, StringComparison.InvariantCulture)) &&
filterPhrases.All( filter => model.Id.Contains(filter, StringComparison.InvariantCulture)))
.Select(n => new Provider.Model(n.Id, null)));
try
{
using var lmStudioRequest = new HttpRequestMessage(HttpMethod.Get, "models");
if(secretKey is not null)
lmStudioRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var lmStudioResponse = await this.HttpClient.SendAsync(lmStudioRequest, token);
if(!lmStudioResponse.IsSuccessStatusCode)
return FailedModelLoadResult(GetDefaultModelLoadFailureReason(lmStudioResponse), $"Status={(int)lmStudioResponse.StatusCode} {lmStudioResponse.ReasonPhrase}");
var lmStudioModelResponse = await lmStudioResponse.Content.ReadFromJsonAsync<ModelsResponse>(token);
return SuccessfulModelLoadResult(lmStudioModelResponse.Data.
Where(model => !ignorePhrases.Any(ignorePhrase => model.Id.Contains(ignorePhrase, StringComparison.InvariantCulture)) &&
filterPhrases.All( filter => model.Id.Contains(filter, StringComparison.InvariantCulture)))
.Select(n => new Provider.Model(n.Id, null)));
}
catch (Exception e) when (this.IsTimeoutException(e, token))
{
await this.SendTimeoutError("loading the available models");
LOGGER.LogError(e, "Timed out while loading models from self-hosted provider '{ProviderInstanceName}'.", this.InstanceName);
return FailedModelLoadResult(ModelLoadFailureReason.PROVIDER_UNAVAILABLE, e.Message);
}
}
}
}

View File

@ -94,6 +94,11 @@ public sealed class DataApp(Expression<Func<Data, DataApp>>? configSelection = n
/// </summary>
public string ShortcutVoiceRecording { get; set; } = ManagedConfiguration.Register(configSelection, n => n.ShortcutVoiceRecording, string.Empty);
/// <summary>
/// The HTTP timeout in seconds for requests to LLM providers.
/// </summary>
public int ProviderHttpTimeoutSeconds { get; set; } = ManagedConfiguration.Register(configSelection, n => n.ProviderHttpTimeoutSeconds, 3600);
/// <summary>
/// Should the user be allowed to add providers?
/// </summary>

View File

@ -140,6 +140,9 @@ public sealed class PluginConfiguration(bool isInternal, LuaState state, PluginT
// Config: global voice recording shortcut
ManagedConfiguration.TryProcessConfiguration(x => x.App, x => x.ShortcutVoiceRecording, this.Id, settingsTable, dryRun);
// Config: timeout for HTTP requests to providers
ManagedConfiguration.TryProcessConfiguration(x => x.App, x => x.ProviderHttpTimeoutSeconds, this.Id, settingsTable, dryRun);
// Handle configured LLM providers:
PluginConfigurationObject.TryParse(PluginConfigurationObjectType.LLM_PROVIDER, x => x.Providers, x => x.NextProviderNum, mainTable, this.Id, ref this.configObjects, dryRun);

View File

@ -241,6 +241,10 @@ public static partial class PluginFactory
if(ManagedConfiguration.IsConfigurationLeftOver(x => x.App, x => x.ShortcutVoiceRecording, AVAILABLE_PLUGINS))
wasConfigurationChanged = true;
// Check for the provider HTTP timeout:
if(ManagedConfiguration.IsConfigurationLeftOver(x => x.App, x => x.ProviderHttpTimeoutSeconds, AVAILABLE_PLUGINS))
wasConfigurationChanged = true;
// Check if audit is required before it can be activated
if(ManagedConfiguration.IsConfigurationLeftOver(x => x.AssistantPluginAudit, x => x.RequireAuditBeforeActivation, AVAILABLE_PLUGINS))
wasConfigurationChanged = true;

View File

@ -39,4 +39,5 @@
- Fixed security issues in the native app runtime by strengthening how AI Studio creates and protects the secret values used for its internal secure connection.
- Updated several security-sensitive Rust dependencies in the native runtime to address known vulnerabilities.
- Updated .NET to v9.0.15
- Updated dependencies
- Updated dependencies
- Increased the default HTTP timeout for requests to LLM providers from a few seconds to 1 hour and made it adjustable via the Enterprise settings.