Fixed a streaming bug (#250)

This commit is contained in:
Thorsten Sommer 2025-01-04 12:30:05 +01:00 committed by GitHub
parent 187663bbf2
commit 258bc7a338
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 46 additions and 55 deletions

View File

@@ -77,7 +77,7 @@ public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://ap
         }
 
         // Send the request using exponential backoff:
-        using var responseData = await this.SendRequest(RequestBuilder, token);
+        var responseData = await this.SendRequest(RequestBuilder, token);
         if(responseData.IsFailedAfterAllRetries)
         {
             this.logger.LogError($"Anthropic chat completion failed: {responseData.ErrorMessage}");

View File

@@ -83,7 +83,7 @@ public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.firew
         }
 
         // Send the request using exponential backoff:
-        using var responseData = await this.SendRequest(RequestBuilder, token);
+        var responseData = await this.SendRequest(RequestBuilder, token);
         if(responseData.IsFailedAfterAllRetries)
         {
             this.logger.LogError($"Fireworks chat completion failed: {responseData.ErrorMessage}");

View File

@@ -84,7 +84,7 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela
         }
 
         // Send the request using exponential backoff:
-        using var responseData = await this.SendRequest(RequestBuilder, token);
+        var responseData = await this.SendRequest(RequestBuilder, token);
         if(responseData.IsFailedAfterAllRetries)
         {
             this.logger.LogError($"Google chat completion failed: {responseData.ErrorMessage}");

View File

@@ -86,7 +86,7 @@ public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/o
         }
 
         // Send the request using exponential backoff:
-        using var responseData = await this.SendRequest(RequestBuilder, token);
+        var responseData = await this.SendRequest(RequestBuilder, token);
        if(responseData.IsFailedAfterAllRetries)
         {
             this.logger.LogError($"Groq chat completion failed: {responseData.ErrorMessage}");

View File

@@ -85,7 +85,7 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api.
         }
 
         // Send the request using exponential backoff:
-        using var responseData = await this.SendRequest(RequestBuilder, token);
+        var responseData = await this.SendRequest(RequestBuilder, token);
         if(responseData.IsFailedAfterAllRetries)
         {
             this.logger.LogError($"Mistral chat completion failed: {responseData.ErrorMessage}");

View File

@@ -113,7 +113,7 @@ public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.o
         }
 
         // Send the request using exponential backoff:
-        using var responseData = await this.SendRequest(RequestBuilder, token);
+        var responseData = await this.SendRequest(RequestBuilder, token);
         if(responseData.IsFailedAfterAllRetries)
         {
             this.logger.LogError($"OpenAI chat completion failed: {responseData.ErrorMessage}");

View File

@@ -85,7 +85,7 @@ public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostnam
         }
 
         // Send the request using exponential backoff:
-        using var responseData = await this.SendRequest(RequestBuilder, token);
+        var responseData = await this.SendRequest(RequestBuilder, token);
         if(responseData.IsFailedAfterAllRetries)
         {
             this.logger.LogError($"Self-hosted provider's chat completion failed: {responseData.ErrorMessage}");

View File

@@ -10,14 +10,4 @@ public readonly record struct HttpRateLimitedStreamResult(
     bool IsSuccessful,
     bool IsFailedAfterAllRetries,
     string ErrorMessage,
-    HttpResponseMessage? Response) : IDisposable
-{
-    #region IDisposable
-
-    public void Dispose()
-    {
-        this.Response?.Dispose();
-    }
-
-    #endregion
-}
+    HttpResponseMessage? Response);

View File

@@ -3,3 +3,4 @@
 - Added a button to regenerate the last AI response.
 - Added a button to edit the last user message.
 - Added a button to stop the AI from generating a response.
+- Fixed a streaming bug that was particularly visible with self-hosted providers.