diff --git a/app/MindWork AI Studio/Assistants/AssistantBase.razor.cs b/app/MindWork AI Studio/Assistants/AssistantBase.razor.cs index 7f92ed0..4bcd672 100644 --- a/app/MindWork AI Studio/Assistants/AssistantBase.razor.cs +++ b/app/MindWork AI Studio/Assistants/AssistantBase.razor.cs @@ -166,6 +166,7 @@ public abstract partial class AssistantBase : ComponentBase, IMessageBusReceiver { this.chatThread = new() { + SelectedProvider = this.providerSettings.Id, WorkspaceId = Guid.Empty, ChatId = Guid.NewGuid(), Name = string.Empty, @@ -185,6 +186,7 @@ public abstract partial class AssistantBase : ComponentBase, IMessageBusReceiver var chatId = Guid.NewGuid(); this.chatThread = new() { + SelectedProvider = this.providerSettings.Id, WorkspaceId = workspaceId, ChatId = chatId, Name = name, @@ -235,8 +237,13 @@ public abstract partial class AssistantBase : ComponentBase, IMessageBusReceiver Role = ChatRole.AI, Content = aiText, }; - - this.chatThread?.Blocks.Add(this.resultingContentBlock); + + if (this.chatThread is not null) + { + this.chatThread.Blocks.Add(this.resultingContentBlock); + this.chatThread.SelectedProvider = this.providerSettings.Id; + } + this.isProcessing = true; this.StateHasChanged(); @@ -284,7 +291,9 @@ public abstract partial class AssistantBase : ComponentBase, IMessageBusReceiver switch (destination) { case Tools.Components.CHAT: - MessageBus.INSTANCE.DeferMessage(this, sendToData.Event, this.ConvertToChatThread); + var convertedChatThread = this.ConvertToChatThread; + convertedChatThread = convertedChatThread with { SelectedProvider = this.providerSettings.Id }; + MessageBus.INSTANCE.DeferMessage(this, sendToData.Event, convertedChatThread); break; default: diff --git a/app/MindWork AI Studio/Chat/ChatThread.cs b/app/MindWork AI Studio/Chat/ChatThread.cs index ae369e8..2c74b10 100644 --- a/app/MindWork AI Studio/Chat/ChatThread.cs +++ b/app/MindWork AI Studio/Chat/ChatThread.cs @@ -15,6 +15,11 @@ public sealed record ChatThread /// public Guid 
WorkspaceId { get; set; } + /// + /// Specifies the provider selected for the chat thread. + /// + public string SelectedProvider { get; set; } = string.Empty; + /// /// The name of the chat thread. Usually generated by an AI model or manually edited by the user. /// diff --git a/app/MindWork AI Studio/Pages/Chat.razor.cs b/app/MindWork AI Studio/Pages/Chat.razor.cs index db841df..4a4d7ed 100644 --- a/app/MindWork AI Studio/Pages/Chat.razor.cs +++ b/app/MindWork AI Studio/Pages/Chat.razor.cs @@ -64,7 +64,6 @@ public partial class Chat : MSGComponentBase, IAsyncDisposable // Configure the spellchecking for the user input: this.SettingsManager.InjectSpellchecking(USER_INPUT_ATTRIBUTES); - this.providerSettings = this.SettingsManager.GetPreselectedProvider(Tools.Components.CHAT); this.currentProfile = this.SettingsManager.GetPreselectedProfile(Tools.Components.CHAT); var deferredContent = MessageBus.INSTANCE.CheckDeferredMessages(Event.SEND_TO_CHAT).FirstOrDefault(); if (deferredContent is not null) @@ -106,7 +105,8 @@ public partial class Chat : MSGComponentBase, IAsyncDisposable this.loadChat = deferredLoading; this.mustLoadChat = true; } - + + this.SelectProviderWhenLoadingChat(); await base.OnInitializedAsync(); } @@ -115,7 +115,12 @@ public partial class Chat : MSGComponentBase, IAsyncDisposable if (firstRender && this.chatThread is not null && this.mustStoreChat) { this.mustStoreChat = false; - await WorkspaceBehaviour.StoreChat(this.chatThread); + + if(this.workspaces is not null) + await this.workspaces.StoreChat(this.chatThread); + else + await WorkspaceBehaviour.StoreChat(this.chatThread); + this.currentWorkspaceId = this.chatThread.WorkspaceId; this.currentWorkspaceName = await WorkspaceBehaviour.LoadWorkspaceName(this.chatThread.WorkspaceId); } @@ -126,8 +131,11 @@ public partial class Chat : MSGComponentBase, IAsyncDisposable this.chatThread = await WorkspaceBehaviour.LoadChat(this.loadChat); if(this.chatThread is not null) + { this.currentWorkspaceName = 
await WorkspaceBehaviour.LoadWorkspaceName(this.chatThread.WorkspaceId); - + this.SelectProviderWhenLoadingChat(); + } + this.StateHasChanged(); } @@ -197,6 +205,7 @@ public partial class Chat : MSGComponentBase, IAsyncDisposable { this.chatThread = new() { + SelectedProvider = this.providerSettings.Id, WorkspaceId = this.currentWorkspaceId, ChatId = Guid.NewGuid(), Name = threadName, @@ -393,6 +402,19 @@ public partial class Chat : MSGComponentBase, IAsyncDisposable this.isStreaming = false; this.hasUnsavedChanges = false; this.userInput = string.Empty; + + switch (this.SettingsManager.ConfigurationData.Chat.AddChatProviderBehavior) + { + case AddChatProviderBehavior.ADDED_CHATS_USE_DEFAULT_PROVIDER: + this.providerSettings = this.SettingsManager.GetPreselectedProvider(Tools.Components.CHAT); + break; + + default: + case AddChatProviderBehavior.ADDED_CHATS_USE_LATEST_PROVIDER: + if(this.providerSettings == default) + this.providerSettings = this.SettingsManager.GetPreselectedProvider(Tools.Components.CHAT); + break; + } if (!useSameWorkspace) { @@ -404,6 +426,7 @@ public partial class Chat : MSGComponentBase, IAsyncDisposable { this.chatThread = new() { + SelectedProvider = this.providerSettings.Id, WorkspaceId = this.currentWorkspaceId, ChatId = Guid.NewGuid(), Name = string.Empty, @@ -486,7 +509,9 @@ public partial class Chat : MSGComponentBase, IAsyncDisposable this.userInput = string.Empty; this.currentWorkspaceId = this.chatThread?.WorkspaceId ?? Guid.Empty; this.currentWorkspaceName = this.chatThread is null ? 
string.Empty : await WorkspaceBehaviour.LoadWorkspaceName(this.chatThread.WorkspaceId); - + + this.SelectProviderWhenLoadingChat(); + this.userInput = string.Empty; if (this.SettingsManager.ConfigurationData.Chat.ShowLatestMessageAfterLoading) { @@ -506,6 +531,27 @@ public partial class Chat : MSGComponentBase, IAsyncDisposable this.chatThread = null; } + private void SelectProviderWhenLoadingChat() + { + var chatProvider = this.chatThread?.SelectedProvider; + switch (this.SettingsManager.ConfigurationData.Chat.LoadingProviderBehavior) + { + default: + case LoadingChatProviderBehavior.USE_CHAT_PROVIDER_IF_AVAILABLE: + this.providerSettings = this.SettingsManager.GetPreselectedProvider(Tools.Components.CHAT, chatProvider); + break; + + case LoadingChatProviderBehavior.ALWAYS_USE_DEFAULT_CHAT_PROVIDER: + this.providerSettings = this.SettingsManager.GetPreselectedProvider(Tools.Components.CHAT); + break; + + case LoadingChatProviderBehavior.ALWAYS_USE_LATEST_CHAT_PROVIDER: + if(this.providerSettings == default) + this.providerSettings = this.SettingsManager.GetPreselectedProvider(Tools.Components.CHAT); + break; + } + } + #region Overrides of MSGComponentBase public override async Task ProcessIncomingMessage(ComponentBase? sendingComponent, Event triggeredEvent, T? 
data) where T : default diff --git a/app/MindWork AI Studio/Pages/Settings.razor b/app/MindWork AI Studio/Pages/Settings.razor index 9ccbfe0..9f7277d 100644 --- a/app/MindWork AI Studio/Pages/Settings.razor +++ b/app/MindWork AI Studio/Pages/Settings.razor @@ -193,6 +193,9 @@ + + + diff --git a/app/MindWork AI Studio/Settings/ConfigurationSelectData.cs b/app/MindWork AI Studio/Settings/ConfigurationSelectData.cs index 7a65fb8..36123f7 100644 --- a/app/MindWork AI Studio/Settings/ConfigurationSelectData.cs +++ b/app/MindWork AI Studio/Settings/ConfigurationSelectData.cs @@ -25,6 +25,19 @@ public readonly record struct ConfigurationSelectData(string Name, T Value); /// public static class ConfigurationSelectDataFactory { + public static IEnumerable> GetLoadingChatProviderBehavior() + { + yield return new("When possible, use the LLM provider which was used for each chat in the first place", LoadingChatProviderBehavior.USE_CHAT_PROVIDER_IF_AVAILABLE); + yield return new("Use the latest LLM provider, which was used before; use the default chat provider initially", LoadingChatProviderBehavior.ALWAYS_USE_LATEST_CHAT_PROVIDER); + yield return new("Always use the default chat provider when loading chats", LoadingChatProviderBehavior.ALWAYS_USE_DEFAULT_CHAT_PROVIDER); + } + + public static IEnumerable> GetAddChatProviderBehavior() + { + yield return new("Use the latest LLM provider, which was used before; use the default chat provider initially", AddChatProviderBehavior.ADDED_CHATS_USE_LATEST_PROVIDER); + yield return new("Always use the default chat provider for new chats", AddChatProviderBehavior.ADDED_CHATS_USE_DEFAULT_PROVIDER); + } + public static IEnumerable> GetSendBehaviorData() { yield return new("No key is sending the input", SendBehavior.NO_KEY_IS_SENDING); diff --git a/app/MindWork AI Studio/Settings/DataModel/AddChatProviderBehavior.cs b/app/MindWork AI Studio/Settings/DataModel/AddChatProviderBehavior.cs new file mode 100644 index 0000000..d9f5b0c --- /dev/null 
+++ b/app/MindWork AI Studio/Settings/DataModel/AddChatProviderBehavior.cs @@ -0,0 +1,7 @@ +namespace AIStudio.Settings.DataModel; + +public enum AddChatProviderBehavior +{ + ADDED_CHATS_USE_DEFAULT_PROVIDER = 0, + ADDED_CHATS_USE_LATEST_PROVIDER, +} \ No newline at end of file diff --git a/app/MindWork AI Studio/Settings/DataModel/DataChat.cs b/app/MindWork AI Studio/Settings/DataModel/DataChat.cs index f68865f..8283150 100644 --- a/app/MindWork AI Studio/Settings/DataModel/DataChat.cs +++ b/app/MindWork AI Studio/Settings/DataModel/DataChat.cs @@ -7,6 +7,16 @@ public sealed class DataChat /// public SendBehavior ShortcutSendBehavior { get; set; } = SendBehavior.ENTER_IS_SENDING; + /// + /// Defines the provider behavior for loading a chat. + /// + public LoadingChatProviderBehavior LoadingProviderBehavior { get; set; } = LoadingChatProviderBehavior.USE_CHAT_PROVIDER_IF_AVAILABLE; + + /// + /// Defines the provider behavior when adding a chat. + /// + public AddChatProviderBehavior AddChatProviderBehavior { get; set; } = AddChatProviderBehavior.ADDED_CHATS_USE_LATEST_PROVIDER; + /// /// Preselect any chat options? 
/// diff --git a/app/MindWork AI Studio/Settings/DataModel/LoadingChatProviderBehavior.cs b/app/MindWork AI Studio/Settings/DataModel/LoadingChatProviderBehavior.cs new file mode 100644 index 0000000..45ef101 --- /dev/null +++ b/app/MindWork AI Studio/Settings/DataModel/LoadingChatProviderBehavior.cs @@ -0,0 +1,8 @@ +namespace AIStudio.Settings.DataModel; + +public enum LoadingChatProviderBehavior +{ + USE_CHAT_PROVIDER_IF_AVAILABLE = 0, + ALWAYS_USE_DEFAULT_CHAT_PROVIDER, + ALWAYS_USE_LATEST_CHAT_PROVIDER, +} \ No newline at end of file diff --git a/app/MindWork AI Studio/Settings/SettingsManager.cs b/app/MindWork AI Studio/Settings/SettingsManager.cs index 4944521..ca07e95 100644 --- a/app/MindWork AI Studio/Settings/SettingsManager.cs +++ b/app/MindWork AI Studio/Settings/SettingsManager.cs @@ -131,7 +131,7 @@ public sealed class SettingsManager(ILogger logger) return minimumLevel; } - public Provider GetPreselectedProvider(Tools.Components component) + public Provider GetPreselectedProvider(Tools.Components component, string? 
chatProviderId = null) { var minimumLevel = this.GetMinimumConfidenceLevel(component); @@ -139,6 +139,14 @@ public sealed class SettingsManager(ILogger logger) if (this.ConfigurationData.Providers.Count == 1 && this.ConfigurationData.Providers[0].UsedLLMProvider.GetConfidence(this).Level >= minimumLevel) return this.ConfigurationData.Providers[0]; + // When there is a chat provider, and it has a confidence level that is high enough, we return it: + if (chatProviderId is not null && !string.IsNullOrWhiteSpace(chatProviderId)) + { + var chatProvider = this.ConfigurationData.Providers.FirstOrDefault(x => x.Id == chatProviderId); + if (chatProvider.UsedLLMProvider.GetConfidence(this).Level >= minimumLevel) + return chatProvider; + } + // When there is a component-preselected provider, and it has a confidence level that is high enough, we return it: var preselectedProvider = component.PreselectedProvider(this); if(preselectedProvider != default && preselectedProvider.UsedLLMProvider.GetConfidence(this).Level >= minimumLevel) diff --git a/app/MindWork AI Studio/wwwroot/changelog/v0.9.21.md b/app/MindWork AI Studio/wwwroot/changelog/v0.9.21.md index bfe7ea8..51d4d9e 100644 --- a/app/MindWork AI Studio/wwwroot/changelog/v0.9.21.md +++ b/app/MindWork AI Studio/wwwroot/changelog/v0.9.21.md @@ -1,3 +1,5 @@ # v0.9.21, build 196 (2024-11-xx xx:xx UTC) +- Added: Chats remember which LLM provider was used. This is useful to continue exactly where you left off. +- Added options for how to decide which LLM provider should be chosen when creating new chats and when loading chats. If you want to restore the previous behavior (always using the default chat provider), you can configure this as well. - Fixed the missed workspace title when loading the today's bias. - Fixed auto-save when sending assistant results to a new chat. \ No newline at end of file