diff --git a/app/MindWork AI Studio/Assistants/AssistantBase.razor.cs b/app/MindWork AI Studio/Assistants/AssistantBase.razor.cs
index 99f29df..b589f8f 100644
--- a/app/MindWork AI Studio/Assistants/AssistantBase.razor.cs
+++ b/app/MindWork AI Studio/Assistants/AssistantBase.razor.cs
@@ -209,16 +209,12 @@ public abstract partial class AssistantBase : ComponentBase, IMessageBusReceiver
this.chatThread = new()
{
SelectedProvider = this.providerSettings.Id,
+ SelectedProfile = this.AllowProfiles ? this.currentProfile.Id : Profile.NO_PROFILE.Id,
+ SystemPrompt = this.SystemPrompt,
WorkspaceId = Guid.Empty,
ChatId = Guid.NewGuid(),
- Name = string.Empty,
+ Name = $"Assistant - {this.Title}",
Seed = this.RNG.Next(),
- SystemPrompt = !this.AllowProfiles ? this.SystemPrompt :
- $"""
- {this.SystemPrompt}
-
- {this.currentProfile.ToSystemPrompt()}
- """,
Blocks = [],
};
}
@@ -229,16 +225,12 @@ public abstract partial class AssistantBase : ComponentBase, IMessageBusReceiver
this.chatThread = new()
{
SelectedProvider = this.providerSettings.Id,
+ SelectedProfile = this.AllowProfiles ? this.currentProfile.Id : Profile.NO_PROFILE.Id,
+ SystemPrompt = this.SystemPrompt,
WorkspaceId = workspaceId,
ChatId = chatId,
Name = name,
Seed = this.RNG.Next(),
- SystemPrompt = !this.AllowProfiles ? this.SystemPrompt :
- $"""
- {this.SystemPrompt}
-
- {this.currentProfile.ToSystemPrompt()}
- """,
Blocks = [],
};
diff --git a/app/MindWork AI Studio/Chat/ChatThread.cs b/app/MindWork AI Studio/Chat/ChatThread.cs
index 2c74b10..b66ad1d 100644
--- a/app/MindWork AI Studio/Chat/ChatThread.cs
+++ b/app/MindWork AI Studio/Chat/ChatThread.cs
@@ -1,3 +1,5 @@
+using AIStudio.Settings;
+
namespace AIStudio.Chat;
/// <summary>
@@ -20,6 +22,11 @@ public sealed record ChatThread
/// </summary>
public string SelectedProvider { get; set; } = string.Empty;
+ /// <summary>
+ /// Specifies the profile selected for the chat thread.
+ /// </summary>
+ public string SelectedProfile { get; set; } = string.Empty;
+
/// <summary>
/// The name of the chat thread. Usually generated by an AI model or manually edited by the user.
/// </summary>
@@ -39,4 +46,55 @@ public sealed record ChatThread
/// The content blocks of the chat thread.
/// </summary>
public List<ContentBlock> Blocks { get; init; } = [];
+
+ /// <summary>
+ /// Prepares the system prompt for the chat thread.
+ /// </summary>
+ /// <remarks>
+ /// The actual system prompt depends on the selected profile. If no profile is selected,
+ /// the system prompt is returned as is. When a profile is selected, the system prompt
+ /// is extended with the profile chosen.
+ /// </remarks>
+ /// <param name="settingsManager">The settings manager instance to use.</param>
+ /// <param name="chatThread">The chat thread to prepare the system prompt for.</param>
+ /// <param name="logger">The logger instance to use.</param>
+ /// <returns>The prepared system prompt.</returns>
+ public string PrepareSystemPrompt(SettingsManager settingsManager, ChatThread chatThread, ILogger logger)
+ {
+ //
+ // Prepare the system prompt:
+ //
+ string systemPromptText;
+ var logMessage = $"Using no profile for chat thread '{chatThread.Name}'.";
+ if (string.IsNullOrWhiteSpace(chatThread.SelectedProfile))
+ systemPromptText = chatThread.SystemPrompt;
+ else
+ {
+ if(!Guid.TryParse(chatThread.SelectedProfile, out var profileId))
+ systemPromptText = chatThread.SystemPrompt;
+ else
+ {
+ if(chatThread.SelectedProfile == Profile.NO_PROFILE.Id || profileId == Guid.Empty)
+ systemPromptText = chatThread.SystemPrompt;
+ else
+ {
+ var profile = settingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == chatThread.SelectedProfile);
+ if(profile == default)
+ systemPromptText = chatThread.SystemPrompt;
+ else
+ {
+ logMessage = $"Using profile '{profile.Name}' for chat thread '{chatThread.Name}'.";
+ systemPromptText = $"""
+ {chatThread.SystemPrompt}
+
+ {profile.ToSystemPrompt()}
+ """;
+ }
+ }
+ }
+ }
+
+ logger.LogInformation(logMessage);
+ return systemPromptText;
+ }
}
\ No newline at end of file
diff --git a/app/MindWork AI Studio/Chat/ContentText.cs b/app/MindWork AI Studio/Chat/ContentText.cs
index 8cb5fe2..f7cc62f 100644
--- a/app/MindWork AI Studio/Chat/ContentText.cs
+++ b/app/MindWork AI Studio/Chat/ContentText.cs
@@ -54,7 +54,7 @@ public sealed class ContentText : IContent
this.InitialRemoteWait = true;
// Iterate over the responses from the AI:
- await foreach (var deltaText in provider.StreamChatCompletion(chatModel, chatThread, token))
+ await foreach (var deltaText in provider.StreamChatCompletion(chatModel, chatThread, settings, token))
{
// When the user cancels the request, we stop the loop:
if (token.IsCancellationRequested)
diff --git a/app/MindWork AI Studio/Components/ChatComponent.razor.cs b/app/MindWork AI Studio/Components/ChatComponent.razor.cs
index c22b385..f324114 100644
--- a/app/MindWork AI Studio/Components/ChatComponent.razor.cs
+++ b/app/MindWork AI Studio/Components/ChatComponent.razor.cs
@@ -210,11 +210,7 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
this.ChatThread = this.ChatThread with
{
- SystemPrompt = $"""
- {SystemPrompts.DEFAULT}
-
- {this.currentProfile.ToSystemPrompt()}
- """
+ SelectedProfile = this.currentProfile.Id,
};
await this.ChatThreadChanged.InvokeAsync(this.ChatThread);
@@ -263,15 +259,12 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
this.ChatThread = new()
{
SelectedProvider = this.Provider.Id,
+ SelectedProfile = this.currentProfile.Id,
+ SystemPrompt = SystemPrompts.DEFAULT,
WorkspaceId = this.currentWorkspaceId,
ChatId = Guid.NewGuid(),
Name = threadName,
Seed = this.RNG.Next(),
- SystemPrompt = $"""
- {SystemPrompts.DEFAULT}
-
- {this.currentProfile.ToSystemPrompt()}
- """,
Blocks = [],
};
@@ -282,6 +275,10 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
// Set the thread name if it is empty:
if (string.IsNullOrWhiteSpace(this.ChatThread.Name))
this.ChatThread.Name = threadName;
+
+ // Update provider and profile:
+ this.ChatThread.SelectedProvider = this.Provider.Id;
+ this.ChatThread.SelectedProfile = this.currentProfile.Id;
}
//
@@ -443,15 +440,12 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
this.ChatThread = new()
{
SelectedProvider = this.Provider.Id,
+ SelectedProfile = this.currentProfile.Id,
+ SystemPrompt = SystemPrompts.DEFAULT,
WorkspaceId = this.currentWorkspaceId,
ChatId = Guid.NewGuid(),
Name = string.Empty,
Seed = this.RNG.Next(),
- SystemPrompt = $"""
- {SystemPrompts.DEFAULT}
-
- {this.currentProfile.ToSystemPrompt()}
- """,
Blocks = [],
};
}
@@ -542,6 +536,8 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
private async Task SelectProviderWhenLoadingChat()
{
var chatProvider = this.ChatThread?.SelectedProvider;
+ var chatProfile = this.ChatThread?.SelectedProfile;
+
switch (this.SettingsManager.ConfigurationData.Chat.LoadingProviderBehavior)
{
default:
@@ -560,6 +556,14 @@ public partial class ChatComponent : MSGComponentBase, IAsyncDisposable
}
await this.ProviderChanged.InvokeAsync(this.Provider);
+
+ // Try to select the profile:
+ if (!string.IsNullOrWhiteSpace(chatProfile))
+ {
+ this.currentProfile = this.SettingsManager.ConfigurationData.Profiles.FirstOrDefault(x => x.Id == chatProfile);
+ if(this.currentProfile == default)
+ this.currentProfile = Profile.NO_PROFILE;
+ }
}
private async Task ToggleWorkspaceOverlay()
diff --git a/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs b/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs
index 7c85ff1..c6ac4bc 100644
--- a/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs
+++ b/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs
@@ -4,6 +4,7 @@ using System.Text.Json;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
+using AIStudio.Settings;
namespace AIStudio.Provider.Anthropic;
@@ -21,7 +22,7 @@ public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://ap
public override string InstanceName { get; set; } = "Anthropic";
/// <inheritdoc />
- public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default)
+ public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
@@ -52,7 +53,7 @@ public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://ap
}
}).ToList()],
- System = chatThread.SystemPrompt,
+ System = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
MaxTokens = 4_096,
// Right now, we only support streaming completions:
diff --git a/app/MindWork AI Studio/Provider/BaseProvider.cs b/app/MindWork AI Studio/Provider/BaseProvider.cs
index bf5501c..58810b7 100644
--- a/app/MindWork AI Studio/Provider/BaseProvider.cs
+++ b/app/MindWork AI Studio/Provider/BaseProvider.cs
@@ -1,6 +1,7 @@
using System.Net;
using AIStudio.Chat;
+using AIStudio.Settings;
using RustService = AIStudio.Tools.RustService;
@@ -53,7 +54,7 @@ public abstract class BaseProvider : IProvider, ISecretId
public abstract string InstanceName { get; set; }
/// <inheritdoc />
- public abstract IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, CancellationToken token = default);
+ public abstract IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, CancellationToken token = default);
/// <inheritdoc />
public abstract IAsyncEnumerable StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, CancellationToken token = default);
diff --git a/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs b/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs
index 3ab0c50..ff04cc9 100644
--- a/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs
+++ b/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs
@@ -4,6 +4,7 @@ using System.Text;
using System.Text.Json;
using AIStudio.Chat;
+using AIStudio.Settings;
namespace AIStudio.Provider.Fireworks;
@@ -23,7 +24,7 @@ public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.firew
public override string InstanceName { get; set; } = "Fireworks.ai";
/// <inheritdoc />
- public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default)
+ public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
@@ -34,7 +35,7 @@ public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.firew
var systemPrompt = new Message
{
Role = "system",
- Content = chatThread.SystemPrompt,
+ Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
};
// Prepare the Fireworks HTTP chat request:
diff --git a/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs b/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs
index e3ec173..7714405 100644
--- a/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs
+++ b/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs
@@ -5,6 +5,7 @@ using System.Text.Json;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
+using AIStudio.Settings;
namespace AIStudio.Provider.Google;
@@ -24,7 +25,7 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela
public override string InstanceName { get; set; } = "Google Gemini";
/// <inheritdoc />
- public override async IAsyncEnumerable<string> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default)
+ public override async IAsyncEnumerable<string> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
@@ -35,7 +36,7 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela
var systemPrompt = new Message
{
Role = "system",
- Content = chatThread.SystemPrompt,
+ Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
};
// Prepare the Google HTTP chat request:
diff --git a/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs b/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs
index 5d0fed8..1d13fdc 100644
--- a/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs
+++ b/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs
@@ -5,6 +5,7 @@ using System.Text.Json;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
+using AIStudio.Settings;
namespace AIStudio.Provider.Groq;
@@ -24,7 +25,7 @@ public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/o
public override string InstanceName { get; set; } = "Groq";
/// <inheritdoc />
- public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default)
+ public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
@@ -35,7 +36,7 @@ public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/o
var systemPrompt = new Message
{
Role = "system",
- Content = chatThread.SystemPrompt,
+ Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
};
// Prepare the OpenAI HTTP chat request:
diff --git a/app/MindWork AI Studio/Provider/IProvider.cs b/app/MindWork AI Studio/Provider/IProvider.cs
index 69f6620..2256dff 100644
--- a/app/MindWork AI Studio/Provider/IProvider.cs
+++ b/app/MindWork AI Studio/Provider/IProvider.cs
@@ -1,4 +1,5 @@
using AIStudio.Chat;
+using AIStudio.Settings;
namespace AIStudio.Provider;
@@ -23,9 +24,10 @@ public interface IProvider
/// </summary>
/// <param name="chatModel">The model to use for chat completion.</param>
/// <param name="chatThread">The chat thread to continue.</param>
+ /// <param name="settingsManager">The settings manager instance to use.</param>
/// <param name="token">The cancellation token.</param>
/// <returns>The chat completion stream.</returns>
- public IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, CancellationToken token = default);
+ public IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, CancellationToken token = default);
/// <summary>
/// Starts an image completion stream.
diff --git a/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs b/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs
index b51778a..a66840b 100644
--- a/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs
+++ b/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs
@@ -5,6 +5,7 @@ using System.Text.Json;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
+using AIStudio.Settings;
namespace AIStudio.Provider.Mistral;
@@ -22,7 +23,7 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api.
public override string InstanceName { get; set; } = "Mistral";
/// <inheritdoc />
- public override async IAsyncEnumerable<string> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default)
+ public override async IAsyncEnumerable<string> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
@@ -33,7 +34,7 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api.
var systemPrompt = new RegularMessage
{
Role = "system",
- Content = chatThread.SystemPrompt,
+ Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
};
// Prepare the Mistral HTTP chat request:
diff --git a/app/MindWork AI Studio/Provider/NoProvider.cs b/app/MindWork AI Studio/Provider/NoProvider.cs
index 6efcc38..ce3fe31 100644
--- a/app/MindWork AI Studio/Provider/NoProvider.cs
+++ b/app/MindWork AI Studio/Provider/NoProvider.cs
@@ -1,6 +1,7 @@
using System.Runtime.CompilerServices;
using AIStudio.Chat;
+using AIStudio.Settings;
namespace AIStudio.Provider;
@@ -18,7 +19,7 @@ public class NoProvider : IProvider
public Task> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult>([]);
- public async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatChatThread, [EnumeratorCancellation] CancellationToken token = default)
+ public async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatChatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
await Task.FromResult(0);
yield break;
diff --git a/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs b/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs
index 1c55af6..9555aaf 100644
--- a/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs
+++ b/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs
@@ -4,6 +4,7 @@ using System.Text;
using System.Text.Json;
using AIStudio.Chat;
+using AIStudio.Settings;
namespace AIStudio.Provider.OpenAI;
@@ -26,7 +27,7 @@ public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.o
public override string InstanceName { get; set; } = "OpenAI";
/// <inheritdoc />
- public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default)
+ public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
@@ -62,7 +63,7 @@ public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.o
var systemPrompt = new Message
{
Role = systemPromptRole,
- Content = chatThread.SystemPrompt,
+ Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
};
// Prepare the OpenAI HTTP chat request:
diff --git a/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs b/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs
index ec81247..0bf876d 100644
--- a/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs
+++ b/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs
@@ -5,6 +5,7 @@ using System.Text.Json;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
+using AIStudio.Settings;
namespace AIStudio.Provider.SelfHosted;
@@ -22,7 +23,7 @@ public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostnam
public override string InstanceName { get; set; } = "Self-hosted";
/// <inheritdoc />
- public override async IAsyncEnumerable<string> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default)
+ public override async IAsyncEnumerable<string> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this, isTrying: true);
@@ -31,7 +32,7 @@ public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostnam
var systemPrompt = new Message
{
Role = "system",
- Content = chatThread.SystemPrompt,
+ Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
};
// Prepare the OpenAI HTTP chat request:
diff --git a/app/MindWork AI Studio/wwwroot/changelog/v0.9.23.md b/app/MindWork AI Studio/wwwroot/changelog/v0.9.23.md
index 67a0b50..51971d8 100644
--- a/app/MindWork AI Studio/wwwroot/changelog/v0.9.23.md
+++ b/app/MindWork AI Studio/wwwroot/changelog/v0.9.23.md
@@ -1,9 +1,11 @@
# v0.9.23, build 198 (2024-12-xx xx:xx UTC)
- Added an ERI server coding assistant as a preview feature behind the RAG feature flag. This helps you implement an ERI server to gain access to, e.g., your enterprise data from within AI Studio.
+- Improved profile handling: Every chat remembers the last profile used.
- Improved the chat UI: You can now set the aspect ratio between workspaces and chat as you like.
- Improved provider requests by retrying them when rate limits are hit.
- Improved the creation of the "bias of the day" workspace; the workspace is now created only when the bias of the day feature is used.
- Improved the save operation of settings by using a temporary file to avoid data loss in rare cases.
+- Improved the system prompt handling: Injection of profiles into system prompts happens right before sending the data. This way, the original system prompts are not modified.
- Fixed OpenAI `o` (aka omni, aka reasoning) models. The early preview versions (released before 17th December 2024) could not use any system prompts; we translated the system prompts to be user prompts. Final versions of the OpenAI `o` models can now use system prompts, but they are named `developer` instead of `system`.
- Fixed layout issues when selecting `other` items (e.g., programming languages).
- Fixed a bug about the bias of the day workspace when the workspace component was hidden.