Mirror of https://github.com/MindWorkAI/AI-Studio.git, synced 2025-02-05 11:49:06 +00:00

Improved the stream handling for all providers (#251)

commit d6521850e9
parent 258bc7a338
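All seven providers get the same treatment in the hunks below: the response stream is opened inside a try/catch, the old `while(!streamReader.EndOfStream)` loop becomes an endless loop whose end-of-stream check and `ReadLineAsync` call are each guarded individually, and any failure logs the problem and ends the stream instead of throwing out of the async iterator. The following is a condensed sketch of that final shape, not the verbatim provider code; the class and method names are illustrative, and it assumes .NET 7+ for `StreamReader.ReadLineAsync(CancellationToken)`:

using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Runtime.CompilerServices;
using System.Threading;
using Microsoft.Extensions.Logging;

public static class ResilientStreamSketch
{
    // Condensed sketch of the pattern: never let a dropped connection throw out
    // of the async iterator; log the problem and end the stream instead.
    public static async IAsyncEnumerable<string> ReadLines(
        HttpContent content,
        ILogger logger,
        [EnumeratorCancellation] CancellationToken token = default)
    {
        StreamReader? streamReader = null;
        try
        {
            // Opening the response stream can fail on network or HTTP errors:
            var stream = await content.ReadAsStreamAsync(token);
            streamReader = new StreamReader(stream);
        }
        catch (Exception e)
        {
            logger.LogError($"Failed to open the response stream: {e.Message}");
        }

        if (streamReader is null)
            yield break;

        while (true)
        {
            // EndOfStream itself can throw when the connection drops mid-stream:
            try
            {
                if (streamReader.EndOfStream)
                    break;
            }
            catch (Exception e)
            {
                logger.LogWarning($"Failed to read the end-of-stream state: {e.Message}");
                break;
            }

            if (token.IsCancellationRequested)
                yield break;

            // Reading the next line is guarded for the same reason:
            string? line;
            try
            {
                line = await streamReader.ReadLineAsync(token);
            }
            catch (Exception e)
            {
                logger.LogError($"Failed to read the stream: {e.Message}");
                break;
            }

            if (string.IsNullOrWhiteSpace(line))
                continue;

            yield return line;
        }

        streamReader.Dispose();
    }
}

A caller consumes such a sequence with `await foreach`, passing the same cancellation token that the UI's stop button cancels.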
@@ -60,6 +60,9 @@ public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://ap
     Stream = true,
 }, JSON_SERIALIZER_OPTIONS);
 
+StreamReader? streamReader = null;
+try
+{
 async Task<HttpRequestMessage> RequestBuilder()
 {
     // Build the HTTP post request:
@@ -88,17 +91,45 @@ public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://ap
     var stream = await responseData.Response!.Content.ReadAsStreamAsync(token);
 
     // Add a stream reader to read the stream, line by line:
-    var streamReader = new StreamReader(stream);
+    streamReader = new StreamReader(stream);
+}
+catch (Exception e)
+{
+    this.logger.LogError($"Failed to stream chat completion from Anthropic '{this.InstanceName}': {e.Message}");
+}
+
+if (streamReader is null)
+    yield break;
 
 // Read the stream, line by line:
-while(!streamReader.EndOfStream)
+while(true)
 {
+    try
+    {
+        if(streamReader.EndOfStream)
+            break;
+    }
+    catch (Exception e)
+    {
+        this.logger.LogWarning($"Failed to read the end-of-stream state from Anthropic '{this.InstanceName}': {e.Message}");
+        break;
+    }
+
     // Check if the token is canceled:
     if(token.IsCancellationRequested)
         yield break;
 
     // Read the next line:
-    var line = await streamReader.ReadLineAsync(token);
+    string? line;
+    try
+    {
+        line = await streamReader.ReadLineAsync(token);
+    }
+    catch (Exception e)
+    {
+        this.logger.LogError($"Failed to read the stream from Anthropic '{this.InstanceName}': {e.Message}");
+        break;
+    }
 
     // Skip empty lines:
     if(string.IsNullOrWhiteSpace(line))
@@ -69,6 +69,9 @@ public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.firew
     Stream = true,
 }, JSON_SERIALIZER_OPTIONS);
 
+StreamReader? streamReader = null;
+try
+{
 async Task<HttpRequestMessage> RequestBuilder()
 {
     // Build the HTTP post request:
@@ -94,17 +97,45 @@ public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.firew
     var fireworksStream = await responseData.Response!.Content.ReadAsStreamAsync(token);
 
     // Add a stream reader to read the stream, line by line:
-    var streamReader = new StreamReader(fireworksStream);
+    streamReader = new StreamReader(fireworksStream);
+}
+catch (Exception e)
+{
+    this.logger.LogError($"Failed to stream chat completion from Fireworks '{this.InstanceName}': {e.Message}");
+}
+
+if (streamReader is null)
+    yield break;
 
 // Read the stream, line by line:
-while(!streamReader.EndOfStream)
+while(true)
 {
+    try
+    {
+        if(streamReader.EndOfStream)
+            break;
+    }
+    catch (Exception e)
+    {
+        this.logger.LogWarning($"Failed to read the end-of-stream state from Fireworks '{this.InstanceName}': {e.Message}");
+        break;
+    }
+
     // Check if the token is canceled:
     if(token.IsCancellationRequested)
         yield break;
 
     // Read the next line:
-    var line = await streamReader.ReadLineAsync(token);
+    string? line;
+    try
+    {
+        line = await streamReader.ReadLineAsync(token);
+    }
+    catch (Exception e)
+    {
+        this.logger.LogError($"Failed to read the stream from Fireworks '{this.InstanceName}': {e.Message}");
+        break;
+    }
 
     // Skip empty lines:
     if(string.IsNullOrWhiteSpace(line))
@@ -70,6 +70,9 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela
     Stream = true,
 }, JSON_SERIALIZER_OPTIONS);
 
+StreamReader? streamReader = null;
+try
+{
 async Task<HttpRequestMessage> RequestBuilder()
 {
     // Build the HTTP post request:
@@ -95,17 +98,45 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela
     var geminiStream = await responseData.Response!.Content.ReadAsStreamAsync(token);
 
     // Add a stream reader to read the stream, line by line:
-    var streamReader = new StreamReader(geminiStream);
+    streamReader = new StreamReader(geminiStream);
+}
+catch (Exception e)
+{
+    this.logger.LogError($"Failed to stream chat completion from Google '{this.InstanceName}': {e.Message}");
+}
+
+if (streamReader is null)
+    yield break;
 
 // Read the stream, line by line:
-while(!streamReader.EndOfStream)
+while(true)
 {
+    try
+    {
+        if(streamReader.EndOfStream)
+            break;
+    }
+    catch (Exception e)
+    {
+        this.logger.LogWarning($"Failed to read the end-of-stream state from Google '{this.InstanceName}': {e.Message}");
+        break;
+    }
+
     // Check if the token is canceled:
     if(token.IsCancellationRequested)
         yield break;
 
     // Read the next line:
-    var line = await streamReader.ReadLineAsync(token);
+    string? line;
+    try
+    {
+        line = await streamReader.ReadLineAsync(token);
+    }
+    catch (Exception e)
+    {
+        this.logger.LogError($"Failed to read the stream from Google '{this.InstanceName}': {e.Message}");
+        break;
+    }
 
     // Skip empty lines:
     if(string.IsNullOrWhiteSpace(line))
@@ -72,6 +72,9 @@ public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/o
     Stream = true,
 }, JSON_SERIALIZER_OPTIONS);
 
+StreamReader? streamReader = null;
+try
+{
 async Task<HttpRequestMessage> RequestBuilder()
 {
     // Build the HTTP post request:
@@ -97,17 +100,45 @@ public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/o
     var groqStream = await responseData.Response!.Content.ReadAsStreamAsync(token);
 
     // Add a stream reader to read the stream, line by line:
-    var streamReader = new StreamReader(groqStream);
+    streamReader = new StreamReader(groqStream);
+}
+catch (Exception e)
+{
+    this.logger.LogError($"Failed to stream chat completion from Groq '{this.InstanceName}': {e.Message}");
+}
+
+if (streamReader is null)
+    yield break;
 
 // Read the stream, line by line:
-while(!streamReader.EndOfStream)
+while(true)
 {
+    try
+    {
+        if(streamReader.EndOfStream)
+            break;
+    }
+    catch (Exception e)
+    {
+        this.logger.LogWarning($"Failed to read the end-of-stream state from Groq '{this.InstanceName}': {e.Message}");
+        break;
+    }
+
     // Check if the token is canceled:
     if(token.IsCancellationRequested)
         yield break;
 
     // Read the next line:
-    var line = await streamReader.ReadLineAsync(token);
+    string? line;
+    try
+    {
+        line = await streamReader.ReadLineAsync(token);
+    }
+    catch (Exception e)
+    {
+        this.logger.LogError($"Failed to read the stream from Groq '{this.InstanceName}': {e.Message}");
+        break;
+    }
 
     // Skip empty lines:
     if(string.IsNullOrWhiteSpace(line))
@@ -71,6 +71,9 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api.
     SafePrompt = false,
 }, JSON_SERIALIZER_OPTIONS);
 
+StreamReader? streamReader = null;
+try
+{
 async Task<HttpRequestMessage> RequestBuilder()
 {
     // Build the HTTP post request:
@@ -96,17 +99,45 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api.
     var mistralStream = await responseData.Response!.Content.ReadAsStreamAsync(token);
 
     // Add a stream reader to read the stream, line by line:
-    var streamReader = new StreamReader(mistralStream);
+    streamReader = new StreamReader(mistralStream);
+}
+catch (Exception e)
+{
+    this.logger.LogError($"Failed to stream chat completion from Mistral '{this.InstanceName}': {e.Message}");
+}
+
+if (streamReader is null)
+    yield break;
 
 // Read the stream, line by line:
-while(!streamReader.EndOfStream)
+while(true)
 {
+    try
+    {
+        if(streamReader.EndOfStream)
+            break;
+    }
+    catch (Exception e)
+    {
+        this.logger.LogWarning($"Failed to read the end-of-stream state from Mistral '{this.InstanceName}': {e.Message}");
+        break;
+    }
+
     // Check if the token is canceled:
     if(token.IsCancellationRequested)
         yield break;
 
     // Read the next line:
-    var line = await streamReader.ReadLineAsync(token);
+    string? line;
+    try
+    {
+        line = await streamReader.ReadLineAsync(token);
+    }
+    catch (Exception e)
+    {
+        this.logger.LogError($"Failed to read the stream from Mistral '{this.InstanceName}': {e.Message}");
+        break;
+    }
 
     // Skip empty lines:
     if(string.IsNullOrWhiteSpace(line))
@@ -99,6 +99,9 @@ public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.o
     Stream = true,
 }, JSON_SERIALIZER_OPTIONS);
 
+StreamReader? streamReader = null;
+try
+{
 async Task<HttpRequestMessage> RequestBuilder()
 {
     // Build the HTTP post request:
@@ -124,17 +127,45 @@ public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.o
     var openAIStream = await responseData.Response!.Content.ReadAsStreamAsync(token);
 
     // Add a stream reader to read the stream, line by line:
-    var streamReader = new StreamReader(openAIStream);
+    streamReader = new StreamReader(openAIStream);
+}
+catch (Exception e)
+{
+    this.logger.LogError($"Failed to stream chat completion from OpenAI '{this.InstanceName}': {e.Message}");
+}
+
+if (streamReader is null)
+    yield break;
 
 // Read the stream, line by line:
-while(!streamReader.EndOfStream)
+while(true)
 {
+    try
+    {
+        if(streamReader.EndOfStream)
+            break;
+    }
+    catch (Exception e)
+    {
+        this.logger.LogWarning($"Failed to read the end-of-stream state from OpenAI '{this.InstanceName}': {e.Message}");
+        break;
+    }
+
     // Check if the token is canceled:
     if(token.IsCancellationRequested)
         yield break;
 
     // Read the next line:
-    var line = await streamReader.ReadLineAsync(token);
+    string? line;
+    try
+    {
+        line = await streamReader.ReadLineAsync(token);
+    }
+    catch (Exception e)
+    {
+        this.logger.LogError($"Failed to read the stream from OpenAI '{this.InstanceName}': {e.Message}");
+        break;
+    }
 
     // Skip empty lines:
     if(string.IsNullOrWhiteSpace(line))
@@ -67,7 +67,7 @@ public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostnam
     MaxTokens = -1,
 }, JSON_SERIALIZER_OPTIONS);
 
-StreamReader? streamReader = default;
+StreamReader? streamReader = null;
 try
 {
     async Task<HttpRequestMessage> RequestBuilder()
@@ -103,17 +103,38 @@ public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostnam
     this.logger.LogError($"Failed to stream chat completion from self-hosted provider '{this.InstanceName}': {e.Message}");
 }
 
-if (streamReader is not null)
-{
+if (streamReader is null)
+    yield break;
+
 // Read the stream, line by line:
-while (!streamReader.EndOfStream)
+while (true)
 {
+    try
+    {
+        if(streamReader.EndOfStream)
+            break;
+    }
+    catch (Exception e)
+    {
+        this.logger.LogWarning($"Failed to read the end-of-stream state from self-hosted provider '{this.InstanceName}': {e.Message}");
+        break;
+    }
+
     // Check if the token is canceled:
     if (token.IsCancellationRequested)
         yield break;
 
     // Read the next line:
-    var line = await streamReader.ReadLineAsync(token);
+    string? line;
+    try
+    {
+        line = await streamReader.ReadLineAsync(token);
+    }
+    catch (Exception e)
+    {
+        this.logger.LogError($"Failed to read the stream from self-hosted provider '{this.InstanceName}': {e.Message}");
+        break;
+    }
 
     // Skip empty lines:
     if (string.IsNullOrWhiteSpace(line))
@@ -151,7 +172,8 @@ public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostnam
         // Yield the response:
         yield return providerResponse.Choices[0].Delta.Content;
     }
-}
+
+    streamReader.Dispose();
 }
 
 #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
@@ -2,5 +2,6 @@
 - Added a button to remove a message from the chat thread.
 - Added a button to regenerate the last AI response.
 - Added a button to edit the last user message.
-- Added a button to stop the AI from generating a response.
+- Added a button to stop the AI from generating more tokens.
+- Improved the stream handling for all providers. The stream handling is now very resilient and handles all kinds of network issues.
 - Fixed a streaming bug that was particularly visible with self-hosted providers.
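The stop-button entry in the changelog above ties directly into this change: each provider streams its answer as an async sequence of strings and checks the cancellation token between lines, so stopping generation amounts to cancelling that token. A minimal, illustrative call-site sketch follows; the `getChatStream` delegate stands in for whichever provider method the UI wires up and is not a name from this commit:

using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

public static class StopButtonSketch
{
    public static async Task RunAsync(Func<CancellationToken, IAsyncEnumerable<string>> getChatStream)
    {
        using var cts = new CancellationTokenSource();

        // A "stop generating" button would simply call cts.Cancel(); here we stop after 10 seconds:
        cts.CancelAfter(TimeSpan.FromSeconds(10));

        await foreach (var chunk in getChatStream(cts.Token))
        {
            // Each chunk is one streamed piece of the response; append it to the chat view.
            Console.Write(chunk);
        }
    }
}

With the providers as changed in this commit, cancellation ends the enumeration quietly via `yield break`, so the caller needs no extra exception handling.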