Mirror of https://github.com/MindWorkAI/AI-Studio.git, synced 2025-04-28 10:39:47 +00:00
Refactored CreateFromProviderAsync method to return the modified chat thread
commit ff0df4d0dd
parent 51bf077485
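
In short: CreateFromProviderAsync no longer returns a plain Task but Task<ChatThread>, and every caller assigns the result back to its chat thread field. Below is a minimal, self-contained sketch of that pattern. IProvider, Model, ChatThread and the method bodies are simplified stand-ins rather than the real AI Studio definitions; only the changed signature and the caller-side assignment are taken from the diff that follows.

// Sketch only: stand-in types, not the real AI Studio code base.
using System.Threading;
using System.Threading.Tasks;

public interface IProvider { }              // stand-in for the provider abstraction
public sealed record Model(string Name);    // stand-in for the model descriptor
public sealed class ChatThread { }          // stand-in for the chat thread

public interface IContent
{
    // Before: Task CreateFromProviderAsync(...)
    // After:  Task<ChatThread> CreateFromProviderAsync(...) -- the method now hands
    //         the (possibly newly created or modified) chat thread back to the caller.
    Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatThread, CancellationToken token = default);
}

public sealed class ContentText : IContent
{
    public async Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatThread, CancellationToken token = default)
    {
        // Instead of silently returning when no thread was given, a fresh thread
        // is created and returned, mirroring the `return new();` in the diff.
        if (chatThread is null)
            return new();

        await Task.Delay(1, token);   // placeholder for streaming the AI response
        return chatThread;            // return the thread so the caller can keep it
    }
}

public sealed class Caller
{
    private ChatThread? chatThread;

    // Caller side: the result is assigned back, so any changes the RAG process
    // made to (or a fresh thread created inside) the call are not lost.
    public async Task SendAsync(IProvider provider, Model model, IContent? lastUserPrompt)
    {
        var aiText = new ContentText();
        this.chatThread = await aiText.CreateFromProviderAsync(provider, model, lastUserPrompt, this.chatThread);
    }
}

Returning the thread is what allows the null-thread path to hand a freshly created thread back to the caller instead of dropping it, which is the basis for the persistence fix listed in the changelog below.
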
@@ -293,7 +293,7 @@ public abstract partial class AssistantBase : ComponentBase, IMessageBusReceiver
         // Use the selected provider to get the AI response.
         // By awaiting this line, we wait for the entire
         // content to be streamed.
-        await aiText.CreateFromProviderAsync(this.providerSettings.CreateProvider(this.Logger), this.providerSettings.Model, this.lastUserPrompt, this.chatThread);
+        this.chatThread = await aiText.CreateFromProviderAsync(this.providerSettings.CreateProvider(this.Logger), this.providerSettings.Model, this.lastUserPrompt, this.chatThread);

         this.isProcessing = false;
         this.StateHasChanged();

@@ -28,7 +28,7 @@ public sealed class ContentImage : IContent, IImageSource
     public Func<Task> StreamingEvent { get; set; } = () => Task.CompletedTask;

     /// <inheritdoc />
-    public Task CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatChatThread, CancellationToken token = default)
+    public Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatChatThread, CancellationToken token = default)
     {
         throw new NotImplementedException();
     }

@@ -36,10 +36,10 @@ public sealed class ContentText : IContent
     public Func<Task> StreamingEvent { get; set; } = () => Task.CompletedTask;

     /// <inheritdoc />
-    public async Task CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatThread, CancellationToken token = default)
+    public async Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatThread, CancellationToken token = default)
     {
         if(chatThread is null)
-            return;
+            return new();

         // Call the RAG process. Right now, we only have one RAG process:
         if (lastPrompt is not null)
@@ -115,6 +115,7 @@ public sealed class ContentText : IContent

         // Inform the UI that the streaming is done:
         await this.StreamingDone();
+        return chatThread;
     }

     #endregion

@@ -41,7 +41,7 @@ public interface IContent
     /// <summary>
     /// Uses the provider to create the content.
     /// </summary>
-    public Task CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatChatThread, CancellationToken token = default);
+    public Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatChatThread, CancellationToken token = default);

     /// <summary>
     /// Returns the corresponding ERI content type.

@@ -475,7 +475,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
             // Use the selected provider to get the AI response.
             // By awaiting this line, we wait for the entire
             // content to be streamed.
-            await aiText.CreateFromProviderAsync(this.Provider.CreateProvider(this.Logger), this.Provider.Model, lastUserPrompt, this.ChatThread, this.cancellationTokenSource.Token);
+            this.ChatThread = await aiText.CreateFromProviderAsync(this.Provider.CreateProvider(this.Logger), this.Provider.Model, lastUserPrompt, this.ChatThread, this.cancellationTokenSource.Token);
         }

         this.cancellationTokenSource = null;

@@ -139,7 +139,7 @@ public partial class Writer : MSGComponentBase, IAsyncDisposable
         this.isStreaming = true;
         this.StateHasChanged();

-        await aiText.CreateFromProviderAsync(this.providerSettings.CreateProvider(this.Logger), this.providerSettings.Model, lastUserPrompt, this.chatThread);
+        this.chatThread = await aiText.CreateFromProviderAsync(this.providerSettings.CreateProvider(this.Logger), this.providerSettings.Model, lastUserPrompt, this.chatThread);
         this.suggestion = aiText.Text;

         this.isStreaming = false;

@@ -4,4 +4,5 @@
 - Improved the ERI client to raise an error when the server responds with additional JSON data that is not expected.
 - Improved the error handling in the ERI data source info dialog in cases where servers respond with an invalid message.
 - Improved the error handling for the entire RAG process.
+- Improved chat thread persistence after modifications through the RAG process.
 - Fixed the chat thread we use for the data retrieval by removing the last block, which is meant to be for the final AI answer.
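
The last two changelog entries describe the user-visible side of this change: because the modified thread is now returned and stored, RAG modifications persist, and the thread used for data retrieval is trimmed so the empty block reserved for the final AI answer is not sent along. A rough, standalone sketch of that trimming idea follows, assuming a hypothetical ChatThread that exposes its blocks as a list; ContentBlock, Blocks and ForRetrieval are illustrative names, not taken from the AI Studio code base.

// Hypothetical shapes for illustration only.
using System.Collections.Generic;
using System.Linq;

public sealed class ContentBlock
{
    public string Role { get; set; } = "";
    public string Text { get; set; } = "";
}

public sealed class ChatThread
{
    public List<ContentBlock> Blocks { get; } = new();

    // Build the thread used for data retrieval: a copy without the last block,
    // since that block is the empty placeholder reserved for the final AI answer.
    public ChatThread ForRetrieval()
    {
        var copy = new ChatThread();
        copy.Blocks.AddRange(this.Blocks.Take(this.Blocks.Count - 1));
        return copy;
    }
}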