diff --git a/README.md b/README.md index 4f8585a..d3830f3 100644 --- a/README.md +++ b/README.md @@ -9,10 +9,10 @@ Things we are currently working on: - [x] ~~Define the [External Data API (EDI)](https://github.com/MindWorkAI/EDI) as a contract for integrating arbitrary external data (PR [#1](https://github.com/MindWorkAI/EDI/pull/1))~~ - [x] ~~App: Metadata for providers (which provider offers embeddings?) (PR [#205](https://github.com/MindWorkAI/AI-Studio/pull/205))~~ - [x] ~~App: Add an option to show preview features (PR [#222](https://github.com/MindWorkAI/AI-Studio/pull/222))~~ - - [ ] App: Configure embedding providers + - [ ] ~~App: Configure embedding providers (PR [#224](https://github.com/MindWorkAI/AI-Studio/pull/224))~~ - [ ] App: Management of data sources (local & external data via [EDI](https://github.com/MindWorkAI/EDI)) - [ ] Runtime: Extract data from txt / md / pdf / docx / xlsx files - - [ ] Runtime: Implement internal embedding provider through [fastembed-rs](https://github.com/Anush008/fastembed-rs) + - [ ] (*Optional*) Runtime: Implement internal embedding provider through [fastembed-rs](https://github.com/Anush008/fastembed-rs) - [ ] App: Implement external embedding providers - [ ] App: Implement the process to vectorize one local file using embeddings - [ ] Runtime: Integration of the vector database [LanceDB](https://github.com/lancedb/lancedb) diff --git a/app/MindWork AI Studio/Components/PreviewAlpha.razor b/app/MindWork AI Studio/Components/PreviewAlpha.razor new file mode 100644 index 0000000..99f9d84 --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewAlpha.razor @@ -0,0 +1,20 @@ + + + + Alpha + + + +
+ + This feature is currently in the alpha phase. + Expect bugs and unfinished work. + + + + Alpha phase means that we are working on the + last details before the beta phase. + +
+
+
\ No newline at end of file diff --git a/app/MindWork AI Studio/Components/PreviewAlpha.razor.cs b/app/MindWork AI Studio/Components/PreviewAlpha.razor.cs new file mode 100644 index 0000000..6246685 --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewAlpha.razor.cs @@ -0,0 +1,5 @@ +using Microsoft.AspNetCore.Components; + +namespace AIStudio.Components; + +public partial class PreviewAlpha : ComponentBase; \ No newline at end of file diff --git a/app/MindWork AI Studio/Components/PreviewBeta.razor b/app/MindWork AI Studio/Components/PreviewBeta.razor new file mode 100644 index 0000000..cd6b3c6 --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewBeta.razor @@ -0,0 +1,19 @@ + + + + Beta + + + +
+ + This feature is currently in the beta phase. + It is still be possible that there are some bugs. + + + + Beta phase means that we are testing the feature. + +
+
+
\ No newline at end of file diff --git a/app/MindWork AI Studio/Components/PreviewBeta.razor.cs b/app/MindWork AI Studio/Components/PreviewBeta.razor.cs new file mode 100644 index 0000000..a5064b6 --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewBeta.razor.cs @@ -0,0 +1,5 @@ +using Microsoft.AspNetCore.Components; + +namespace AIStudio.Components; + +public partial class PreviewBeta : ComponentBase; \ No newline at end of file diff --git a/app/MindWork AI Studio/Components/PreviewExperimental.razor b/app/MindWork AI Studio/Components/PreviewExperimental.razor new file mode 100644 index 0000000..59e6651 --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewExperimental.razor @@ -0,0 +1,22 @@ + + + + Experimental + + + +
+ + This feature is currently in the experimental phase. + Expect bugs, unfinished work, changes in future + versions, and more. + + + + Experimental phase means that we have a vision for a feature + but not a clear plan yet. We are still exploring the + possibilities. + +
+
+
\ No newline at end of file diff --git a/app/MindWork AI Studio/Components/PreviewExperimental.razor.cs b/app/MindWork AI Studio/Components/PreviewExperimental.razor.cs new file mode 100644 index 0000000..c66fa73 --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewExperimental.razor.cs @@ -0,0 +1,5 @@ +using Microsoft.AspNetCore.Components; + +namespace AIStudio.Components; + +public partial class PreviewExperimental : ComponentBase; \ No newline at end of file diff --git a/app/MindWork AI Studio/Components/PreviewPrototype.razor b/app/MindWork AI Studio/Components/PreviewPrototype.razor new file mode 100644 index 0000000..f645e0c --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewPrototype.razor @@ -0,0 +1,21 @@ + + + + Prototype + + + +
+ + This feature is currently in the prototype phase. + Expect bugs, unfinished work, changes in future + versions, and more. + + + + Prototype phase means that we have a plan but we + are still working on it. + +
+
+
\ No newline at end of file diff --git a/app/MindWork AI Studio/Components/PreviewPrototype.razor.cs b/app/MindWork AI Studio/Components/PreviewPrototype.razor.cs new file mode 100644 index 0000000..573e2fd --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewPrototype.razor.cs @@ -0,0 +1,5 @@ +using Microsoft.AspNetCore.Components; + +namespace AIStudio.Components; + +public partial class PreviewPrototype : ComponentBase; \ No newline at end of file diff --git a/app/MindWork AI Studio/Components/PreviewReleaseCandidate.razor b/app/MindWork AI Studio/Components/PreviewReleaseCandidate.razor new file mode 100644 index 0000000..44b5108 --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewReleaseCandidate.razor @@ -0,0 +1,19 @@ + + + + Release Candidate + + + +
+ + This feature is about to be released. We think it's ready for production. + There should be no more bugs. + + + + Release candidates are the final step before a feature is proven to be stable. + +
+
+
\ No newline at end of file diff --git a/app/MindWork AI Studio/Components/PreviewReleaseCandidate.razor.cs b/app/MindWork AI Studio/Components/PreviewReleaseCandidate.razor.cs new file mode 100644 index 0000000..1d22d17 --- /dev/null +++ b/app/MindWork AI Studio/Components/PreviewReleaseCandidate.razor.cs @@ -0,0 +1,5 @@ +using Microsoft.AspNetCore.Components; + +namespace AIStudio.Components; + +public partial class PreviewReleaseCandidate : ComponentBase; \ No newline at end of file diff --git a/app/MindWork AI Studio/Dialogs/EmbeddingDialog.razor b/app/MindWork AI Studio/Dialogs/EmbeddingDialog.razor new file mode 100644 index 0000000..5305e43 --- /dev/null +++ b/app/MindWork AI Studio/Dialogs/EmbeddingDialog.razor @@ -0,0 +1,118 @@ +@using AIStudio.Provider +@using AIStudio.Provider.SelfHosted + + + + + + @* ReSharper disable once CSharpWarnings::CS8974 *@ + + @foreach (LLMProviders provider in Enum.GetValues(typeof(LLMProviders))) + { + if (provider.ProvideEmbeddings()) + { + @provider + } + } + + Create account + + + @* ReSharper disable once CSharpWarnings::CS8974 *@ + + + + + + @foreach (Host host in Enum.GetValues(typeof(Host))) + { + if (host.AreEmbeddingsSupported()) + { + @host.Name() + } + } + + + + @if (this.DataLLMProvider.IsEmbeddingModelProvidedManually(this.DataHost)) + { + + } + else + { + Load + + @foreach (var model in this.availableModels) + { + @model + } + + } + + + @* ReSharper disable once CSharpWarnings::CS8974 *@ + + + + + + + Cancel + + @if(this.IsEditing) + { + @:Update + } + else + { + @:Add + } + + + \ No newline at end of file diff --git a/app/MindWork AI Studio/Dialogs/EmbeddingDialog.razor.cs b/app/MindWork AI Studio/Dialogs/EmbeddingDialog.razor.cs new file mode 100644 index 0000000..5494ac0 --- /dev/null +++ b/app/MindWork AI Studio/Dialogs/EmbeddingDialog.razor.cs @@ -0,0 +1,258 @@ +using AIStudio.Provider; +using AIStudio.Settings; +using AIStudio.Tools.Validation; + +using Microsoft.AspNetCore.Components; + +using Host = 
AIStudio.Provider.SelfHosted.Host; + +namespace AIStudio.Dialogs; + +public partial class EmbeddingDialog : ComponentBase, ISecretId +{ + [CascadingParameter] + private MudDialogInstance MudDialog { get; set; } = null!; + + /// + /// The embedding's number in the list. + /// + [Parameter] + public uint DataNum { get; set; } + + /// + /// The embedding's ID. + /// + [Parameter] + public string DataId { get; set; } = Guid.NewGuid().ToString(); + + /// + /// The user chosen name. + /// + [Parameter] + public string DataName { get; set; } = string.Empty; + + /// + /// The chosen hostname for self-hosted providers. + /// + [Parameter] + public string DataHostname { get; set; } = string.Empty; + + /// + /// The host to use, e.g., llama.cpp. + /// + [Parameter] + public Host DataHost { get; set; } = Host.NONE; + + /// + /// Is this provider self-hosted? + /// + [Parameter] + public bool IsSelfHosted { get; set; } + + /// + /// The provider to use. + /// + [Parameter] + public LLMProviders DataLLMProvider { get; set; } = LLMProviders.NONE; + + /// + /// The embedding model to use. + /// + [Parameter] + public Model DataModel { get; set; } + + /// + /// Should the dialog be in editing mode? + /// + [Parameter] + public bool IsEditing { get; init; } + + [Inject] + private SettingsManager SettingsManager { get; init; } = null!; + + [Inject] + private ILogger Logger { get; init; } = null!; + + [Inject] + private RustService RustService { get; init; } = null!; + + private static readonly Dictionary SPELLCHECK_ATTRIBUTES = new(); + + /// + /// The list of used instance names. We need this to check for uniqueness. 
+ /// + private List UsedInstanceNames { get; set; } = []; + + private bool dataIsValid; + private string[] dataIssues = []; + private string dataAPIKey = string.Empty; + private string dataManuallyModel = string.Empty; + private string dataAPIKeyStorageIssue = string.Empty; + private string dataEditingPreviousInstanceName = string.Empty; + + // We get the form reference from Blazor code to validate it manually: + private MudForm form = null!; + + private readonly List availableModels = new(); + private readonly Encryption encryption = Program.ENCRYPTION; + private readonly ProviderValidation providerValidation; + + public EmbeddingDialog() + { + this.providerValidation = new() + { + GetProvider = () => this.DataLLMProvider, + GetAPIKeyStorageIssue = () => this.dataAPIKeyStorageIssue, + GetPreviousInstanceName = () => this.dataEditingPreviousInstanceName, + GetUsedInstanceNames = () => this.UsedInstanceNames, + GetHost = () => this.DataHost, + }; + } + + private EmbeddingProvider CreateEmbeddingProviderSettings() + { + var cleanedHostname = this.DataHostname.Trim(); + return new() + { + Num = this.DataNum, + Id = this.DataId, + Name = this.DataName, + UsedLLMProvider = this.DataLLMProvider, + Model = this.DataLLMProvider is LLMProviders.SELF_HOSTED ? new Model(this.dataManuallyModel, null) : this.DataModel, + IsSelfHosted = this.DataLLMProvider is LLMProviders.SELF_HOSTED, + Hostname = cleanedHostname.EndsWith('/') ? 
cleanedHostname[..^1] : cleanedHostname, + Host = this.DataHost, + }; + } + + #region Overrides of ComponentBase + + protected override async Task OnInitializedAsync() + { + // Configure the spellchecking for the instance name input: + this.SettingsManager.InjectSpellchecking(SPELLCHECK_ATTRIBUTES); + + // Load the used instance names: + this.UsedInstanceNames = this.SettingsManager.ConfigurationData.EmbeddingProviders.Select(x => x.Name.ToLowerInvariant()).ToList(); + + // When editing, we need to load the data: + if(this.IsEditing) + { + this.dataEditingPreviousInstanceName = this.DataName.ToLowerInvariant(); + + // When using self-hosted embedding, we must copy the model name: + if (this.DataLLMProvider is LLMProviders.SELF_HOSTED) + this.dataManuallyModel = this.DataModel.Id; + + // + // We cannot load the API key for self-hosted providers: + // + if (this.DataLLMProvider is LLMProviders.SELF_HOSTED && this.DataHost is not Host.OLLAMA) + { + await this.ReloadModels(); + await base.OnInitializedAsync(); + return; + } + + // Load the API key: + var requestedSecret = await this.RustService.GetAPIKey(this, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED); + if (requestedSecret.Success) + this.dataAPIKey = await requestedSecret.Secret.Decrypt(this.encryption); + else + { + this.dataAPIKey = string.Empty; + if (this.DataLLMProvider is not LLMProviders.SELF_HOSTED) + { + this.dataAPIKeyStorageIssue = $"Failed to load the API key from the operating system. The message was: {requestedSecret.Issue}. You might ignore this message and provide the API key again."; + await this.form.Validate(); + } + } + + await this.ReloadModels(); + } + + await base.OnInitializedAsync(); + } + + protected override async Task OnAfterRenderAsync(bool firstRender) + { + // Reset the validation when not editing and on the first render. + // We don't want to show validation errors when the user opens the dialog. 
+ if(!this.IsEditing && firstRender) + this.form.ResetValidation(); + + await base.OnAfterRenderAsync(firstRender); + } + + #endregion + + #region Implementation of ISecretId + + public string SecretId => this.DataId; + + public string SecretName => this.DataName; + + #endregion + + private async Task Store() + { + await this.form.Validate(); + if (!string.IsNullOrWhiteSpace(this.dataAPIKeyStorageIssue)) + this.dataAPIKeyStorageIssue = string.Empty; + + // When the data is not valid, we don't store it: + if (!this.dataIsValid) + return; + + // Use the data model to store the provider. + // We just return this data to the parent component: + var addedProviderSettings = this.CreateEmbeddingProviderSettings(); + if (!string.IsNullOrWhiteSpace(this.dataAPIKey)) + { + // Store the API key in the OS secure storage: + var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey); + if (!storeResponse.Success) + { + this.dataAPIKeyStorageIssue = $"Failed to store the API key in the operating system. The message was: {storeResponse.Issue}. Please try again."; + await this.form.Validate(); + return; + } + } + + this.MudDialog.Close(DialogResult.Ok(addedProviderSettings)); + } + + private string? 
ValidateManuallyModel(string manuallyModel) + { + if (this.DataLLMProvider is LLMProviders.SELF_HOSTED && string.IsNullOrWhiteSpace(manuallyModel)) + return "Please enter an embedding model name."; + + return null; + } + + private void Cancel() => this.MudDialog.Cancel(); + + private async Task ReloadModels() + { + var currentEmbeddingProviderSettings = this.CreateEmbeddingProviderSettings(); + var provider = currentEmbeddingProviderSettings.CreateProvider(this.Logger); + if(provider is NoProvider) + return; + + var models = await provider.GetEmbeddingModels(this.dataAPIKey); + + // Order descending by ID means that the newest models probably come first: + var orderedModels = models.OrderByDescending(n => n.Id); + + this.availableModels.Clear(); + this.availableModels.AddRange(orderedModels); + } + + private string APIKeyText => this.DataLLMProvider switch + { + LLMProviders.SELF_HOSTED => "(Optional) API Key", + _ => "API Key", + }; + + private bool IsNoneProvider => this.DataLLMProvider is LLMProviders.NONE; +} \ No newline at end of file diff --git a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor index efd5513..108f737 100644 --- a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor +++ b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor @@ -6,13 +6,13 @@ @* ReSharper disable once CSharpWarnings::CS8974 *@ - + @foreach (LLMProviders provider in Enum.GetValues(typeof(LLMProviders))) { @provider } - Create account + Create account @* ReSharper disable once CSharpWarnings::CS8974 *@ @@ -20,29 +20,29 @@ T="string" @bind-Text="@this.dataAPIKey" Label="@this.APIKeyText" - Disabled="@(!this.NeedAPIKey)" + Disabled="@(!this.DataLLMProvider.IsAPIKeyNeeded(this.DataHost))" Class="mb-3" Adornment="Adornment.Start" AdornmentIcon="@Icons.Material.Filled.VpnKey" AdornmentColor="Color.Info" InputType="InputType.Password" - Validation="@this.ValidatingAPIKey" + Validation="@this.providerValidation.ValidatingAPIKey" /> 
- + @foreach (Host host in Enum.GetValues(typeof(Host))) { @host.Name() @@ -50,9 +50,9 @@ - @if (this.ProvideModelManually) + @if (this.DataLLMProvider.IsLLMModelProvidedManually()) { - Show available models + Show available models Load - + Load + @foreach (var model in this.availableModels) { @model @@ -89,7 +89,7 @@ Adornment="Adornment.Start" AdornmentIcon="@Icons.Material.Filled.Lightbulb" AdornmentColor="Color.Info" - Validation="@this.ValidatingInstanceName" + Validation="@this.providerValidation.ValidatingInstanceName" UserAttributes="@SPELLCHECK_ATTRIBUTES" /> diff --git a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor.cs b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor.cs index b3458cb..9e5fcb6 100644 --- a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor.cs +++ b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor.cs @@ -1,5 +1,6 @@ using AIStudio.Provider; using AIStudio.Settings; +using AIStudio.Tools.Validation; using Microsoft.AspNetCore.Components; @@ -11,7 +12,7 @@ namespace AIStudio.Dialogs; /// /// The provider settings dialog. /// -public partial class ProviderDialog : ComponentBase +public partial class ProviderDialog : ComponentBase, ISecretId { [CascadingParameter] private MudDialogInstance MudDialog { get; set; } = null!; @@ -41,7 +42,7 @@ public partial class ProviderDialog : ComponentBase public string DataHostname { get; set; } = string.Empty; /// - /// The local host to use, e.g., llama.cpp. + /// The host to use, e.g., llama.cpp. 
/// [Parameter] public Host DataHost { get; set; } = Host.NONE; @@ -98,6 +99,19 @@ public partial class ProviderDialog : ComponentBase private readonly List availableModels = new(); private readonly Encryption encryption = Program.ENCRYPTION; + private readonly ProviderValidation providerValidation; + + public ProviderDialog() + { + this.providerValidation = new() + { + GetProvider = () => this.DataLLMProvider, + GetAPIKeyStorageIssue = () => this.dataAPIKeyStorageIssue, + GetPreviousInstanceName = () => this.dataEditingPreviousInstanceName, + GetUsedInstanceNames = () => this.UsedInstanceNames, + GetHost = () => this.DataHost, + }; + } private Settings.Provider CreateProviderSettings() { @@ -144,23 +158,10 @@ public partial class ProviderDialog : ComponentBase return; } - var loadedProviderSettings = this.CreateProviderSettings(); - var provider = loadedProviderSettings.CreateProvider(this.Logger); - if(provider is NoProvider) - { - await base.OnInitializedAsync(); - return; - } - // Load the API key: - var requestedSecret = await this.RustService.GetAPIKey(provider, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED); - if(requestedSecret.Success) - { + var requestedSecret = await this.RustService.GetAPIKey(this, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED); + if (requestedSecret.Success) this.dataAPIKey = await requestedSecret.Secret.Decrypt(this.encryption); - - // Now, we try to load the list of available models: - await this.ReloadModels(); - } else { this.dataAPIKey = string.Empty; @@ -169,10 +170,9 @@ public partial class ProviderDialog : ComponentBase this.dataAPIKeyStorageIssue = $"Failed to load the API key from the operating system. The message was: {requestedSecret.Issue}. You might ignore this message and provide the API key again."; await this.form.Validate(); } - - // We still try to load the models. 
Some local hosts don't need an API key: - await this.ReloadModels(); } + + await this.ReloadModels(); } await base.OnInitializedAsync(); @@ -189,7 +189,15 @@ public partial class ProviderDialog : ComponentBase } #endregion + + #region Implementation of ISecretId + + public string SecretId => this.DataLLMProvider.ToName(); + public string SecretName => this.DataInstanceName; + + #endregion + private async Task Store() { await this.form.Validate(); @@ -205,11 +213,8 @@ public partial class ProviderDialog : ComponentBase var addedProviderSettings = this.CreateProviderSettings(); if (!string.IsNullOrWhiteSpace(this.dataAPIKey)) { - // We need to instantiate the provider to store the API key: - var provider = addedProviderSettings.CreateProvider(this.Logger); - // Store the API key in the OS secure storage: - var storeResponse = await this.RustService.SetAPIKey(provider, this.dataAPIKey); + var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey); if (!storeResponse.Success) { this.dataAPIKeyStorageIssue = $"Failed to store the API key in the operating system. The message was: {storeResponse.Issue}. Please try again."; @@ -221,25 +226,6 @@ public partial class ProviderDialog : ComponentBase this.MudDialog.Close(DialogResult.Ok(addedProviderSettings)); } - private string? ValidatingProvider(LLMProviders llmProvider) - { - if (llmProvider == LLMProviders.NONE) - return "Please select a provider."; - - return null; - } - - private string? ValidatingHost(Host host) - { - if(this.DataLLMProvider is not LLMProviders.SELF_HOSTED) - return null; - - if (host == Host.NONE) - return "Please select a host."; - - return null; - } - private string? ValidateManuallyModel(string manuallyModel) { if (this.DataLLMProvider is LLMProviders.FIREWORKS && string.IsNullOrWhiteSpace(manuallyModel)) @@ -247,64 +233,6 @@ public partial class ProviderDialog : ComponentBase return null; } - - private string? 
ValidatingModel(Model model) - { - if(this.DataLLMProvider is LLMProviders.SELF_HOSTED && this.DataHost == Host.LLAMACPP) - return null; - - if (model == default) - return "Please select a model."; - - return null; - } - - private string? ValidatingInstanceName(string instanceName) - { - if (string.IsNullOrWhiteSpace(instanceName)) - return "Please enter an instance name."; - - if (instanceName.Length > 40) - return "The instance name must not exceed 40 characters."; - - // The instance name must be unique: - var lowerInstanceName = instanceName.ToLowerInvariant(); - if (lowerInstanceName != this.dataEditingPreviousInstanceName && this.UsedInstanceNames.Contains(lowerInstanceName)) - return "The instance name must be unique; the chosen name is already in use."; - - return null; - } - - private string? ValidatingAPIKey(string apiKey) - { - if(this.DataLLMProvider is LLMProviders.SELF_HOSTED) - return null; - - if(!string.IsNullOrWhiteSpace(this.dataAPIKeyStorageIssue)) - return this.dataAPIKeyStorageIssue; - - if(string.IsNullOrWhiteSpace(apiKey)) - return "Please enter an API key."; - - return null; - } - - private string? 
ValidatingHostname(string hostname) - { - if(this.DataLLMProvider != LLMProviders.SELF_HOSTED) - return null; - - if(string.IsNullOrWhiteSpace(hostname)) - return "Please enter a hostname, e.g., http://localhost:1234"; - - if(!hostname.StartsWith("http://", StringComparison.InvariantCultureIgnoreCase) && !hostname.StartsWith("https://", StringComparison.InvariantCultureIgnoreCase)) - return "The hostname must start with either http:// or https://"; - - if(!Uri.TryCreate(hostname, UriKind.Absolute, out _)) - return "The hostname is not a valid HTTP(S) URL."; - - return null; - } private void Cancel() => this.MudDialog.Cancel(); @@ -324,109 +252,11 @@ public partial class ProviderDialog : ComponentBase this.availableModels.AddRange(orderedModels); } - private bool CanLoadModels() - { - if (this.DataLLMProvider is LLMProviders.SELF_HOSTED) - { - switch (this.DataHost) - { - case Host.NONE: - return false; - - case Host.LLAMACPP: - return false; - - case Host.LM_STUDIO: - return true; - - case Host.OLLAMA: - return true; - - default: - return false; - } - } - - if(this.DataLLMProvider is LLMProviders.NONE) - return false; - - if(string.IsNullOrWhiteSpace(this.dataAPIKey)) - return false; - - return true; - } - - private bool ShowRegisterButton => this.DataLLMProvider switch - { - LLMProviders.OPEN_AI => true, - LLMProviders.MISTRAL => true, - LLMProviders.ANTHROPIC => true, - LLMProviders.GOOGLE => true, - - LLMProviders.GROQ => true, - LLMProviders.FIREWORKS => true, - - _ => false, - }; - - private bool NeedAPIKey => this.DataLLMProvider switch - { - LLMProviders.OPEN_AI => true, - LLMProviders.MISTRAL => true, - LLMProviders.ANTHROPIC => true, - LLMProviders.GOOGLE => true, - - LLMProviders.GROQ => true, - LLMProviders.FIREWORKS => true, - - LLMProviders.SELF_HOSTED => this.DataHost is Host.OLLAMA, - - _ => false, - }; - private string APIKeyText => this.DataLLMProvider switch { LLMProviders.SELF_HOSTED => "(Optional) API Key", _ => "API Key", }; - - private bool 
NeedHostname => this.DataLLMProvider switch - { - LLMProviders.SELF_HOSTED => true, - _ => false, - }; - - private bool NeedHost => this.DataLLMProvider switch - { - LLMProviders.SELF_HOSTED => true, - _ => false, - }; - - private bool ProvideModelManually => this.DataLLMProvider switch - { - LLMProviders.FIREWORKS => true, - _ => false, - }; - - private string GetModelOverviewURL() => this.DataLLMProvider switch - { - LLMProviders.FIREWORKS => "https://fireworks.ai/models?show=Serverless", - - _ => string.Empty, - }; - - private string GetProviderCreationURL() => this.DataLLMProvider switch - { - LLMProviders.OPEN_AI => "https://platform.openai.com/signup", - LLMProviders.MISTRAL => "https://console.mistral.ai/", - LLMProviders.ANTHROPIC => "https://console.anthropic.com/dashboard", - LLMProviders.GOOGLE => "https://console.cloud.google.com/", - - LLMProviders.GROQ => "https://console.groq.com/", - LLMProviders.FIREWORKS => "https://fireworks.ai/login", - - _ => string.Empty, - }; private bool IsNoneProvider => this.DataLLMProvider is LLMProviders.NONE; } \ No newline at end of file diff --git a/app/MindWork AI Studio/Pages/Settings.razor b/app/MindWork AI Studio/Pages/Settings.razor index 70c8816..52d27e5 100644 --- a/app/MindWork AI Studio/Pages/Settings.razor +++ b/app/MindWork AI Studio/Pages/Settings.razor @@ -40,11 +40,11 @@ @if (context.UsedLLMProvider is not LLMProviders.SELF_HOSTED) { - @this.GetProviderModelName(context) + @this.GetLLMProviderModelName(context) } else if (context.UsedLLMProvider is LLMProviders.SELF_HOSTED && context.Host is not Host.LLAMACPP) { - @this.GetProviderModelName(context) + @this.GetLLMProviderModelName(context) } else { @@ -52,13 +52,13 @@ } - + Open Dashboard - + Edit - + Delete @@ -70,7 +70,7 @@ No providers configured yet. 
} - + Add Provider @@ -131,6 +131,73 @@ + @if (this.SettingsManager.ConfigurationData.App.PreviewVisibility >= PreviewVisibility.PROTOTYPE) + { + + + + Configured Embeddings + + + Embeddings are a way to represent words, sentences, entire documents, or even images and videos as digital + fingerprints. Just like each person has a unique fingerprint, embedding models create unique digital patterns + that capture the meaning and characteristics of the content they analyze. When two things are similar in meaning + or content, their digital fingerprints will look very similar. For example, the fingerprints for 'happy' and + 'joyful' would be more alike than those for 'happy' and 'sad'. + + + + This helps AI Studio understand and compare things in a way that's similar to how humans do. When you're working on + something, AI Studio can automatically identify related documents and data by comparing their digital fingerprints. + For instance, if you're writing about customer service, AI Studio can instantly find other documents in your data that + discuss similar topics or experiences, even if they use different words. + + + + + + + + + + + # + Name + Provider + Model + Actions + + + @context.Num + @context.Name + @context.UsedLLMProvider + @this.GetEmbeddingProviderModelName(context) + + + + Open Dashboard + + + Edit + + + Delete + + + + + + @if (this.SettingsManager.ConfigurationData.EmbeddingProviders.Count == 0) + { + No embeddings configured yet. + } + + + Add Embedding + + + } + Your Profiles @@ -187,7 +254,7 @@ - + @@ -199,7 +266,7 @@ - + @@ -219,7 +286,7 @@ - + @@ -237,7 +304,7 @@ } - + @@ -251,7 +318,7 @@ } - + @@ -273,7 +340,7 @@ } - + @@ -302,7 +369,7 @@ } - + @@ -316,7 +383,7 @@ } - + @@ -331,7 +398,7 @@ - + @@ -347,7 +414,7 @@ } - + @@ -368,7 +435,7 @@ } - + @@ -379,7 +446,7 @@ - + @@ -393,7 +460,7 @@ } - + @@ -407,7 +474,7 @@ } - + @@ -433,7 +500,7 @@ } - + @@ -444,7 +511,7 @@ and attempts to convert relative links into absolute links so that they can be used. 
- + diff --git a/app/MindWork AI Studio/Pages/Settings.razor.cs b/app/MindWork AI Studio/Pages/Settings.razor.cs index 8a2e109..a59859b 100644 --- a/app/MindWork AI Studio/Pages/Settings.razor.cs +++ b/app/MindWork AI Studio/Pages/Settings.razor.cs @@ -22,13 +22,11 @@ public partial class Settings : ComponentBase, IMessageBusReceiver, IDisposable [Inject] private MessageBus MessageBus { get; init; } = null!; - [Inject] - private ILogger Logger { get; init; } = null!; - [Inject] private RustService RustService { get; init; } = null!; - private readonly List> availableProviders = new(); + private readonly List> availableLLMProviders = new(); + private readonly List> availableEmbeddingProviders = new(); #region Overrides of ComponentBase @@ -46,14 +44,14 @@ public partial class Settings : ComponentBase, IMessageBusReceiver, IDisposable #region Provider related - private async Task AddProvider() + private async Task AddLLMProvider() { var dialogParameters = new DialogParameters { { x => x.IsEditing, false }, }; - var dialogReference = await this.DialogService.ShowAsync("Add Provider", dialogParameters, DialogOptions.FULLSCREEN); + var dialogReference = await this.DialogService.ShowAsync("Add LLM Provider", dialogParameters, DialogOptions.FULLSCREEN); var dialogResult = await dialogReference.Result; if (dialogResult is null || dialogResult.Canceled) return; @@ -68,7 +66,7 @@ public partial class Settings : ComponentBase, IMessageBusReceiver, IDisposable await this.MessageBus.SendMessage(this, Event.CONFIGURATION_CHANGED); } - private async Task EditProvider(AIStudio.Settings.Provider provider) + private async Task EditLLMProvider(AIStudio.Settings.Provider provider) { var dialogParameters = new DialogParameters { @@ -83,7 +81,7 @@ public partial class Settings : ComponentBase, IMessageBusReceiver, IDisposable { x => x.DataHost, provider.Host }, }; - var dialogReference = await this.DialogService.ShowAsync("Edit Provider", dialogParameters, DialogOptions.FULLSCREEN); + 
var dialogReference = await this.DialogService.ShowAsync("Edit LLM Provider", dialogParameters, DialogOptions.FULLSCREEN); var dialogResult = await dialogReference.Result; if (dialogResult is null || dialogResult.Canceled) return; @@ -102,20 +100,19 @@ public partial class Settings : ComponentBase, IMessageBusReceiver, IDisposable await this.MessageBus.SendMessage(this, Event.CONFIGURATION_CHANGED); } - private async Task DeleteProvider(AIStudio.Settings.Provider provider) + private async Task DeleteLLMProvider(AIStudio.Settings.Provider provider) { var dialogParameters = new DialogParameters { { "Message", $"Are you sure you want to delete the provider '{provider.InstanceName}'?" }, }; - var dialogReference = await this.DialogService.ShowAsync("Delete Provider", dialogParameters, DialogOptions.FULLSCREEN); + var dialogReference = await this.DialogService.ShowAsync("Delete LLM Provider", dialogParameters, DialogOptions.FULLSCREEN); var dialogResult = await dialogReference.Result; if (dialogResult is null || dialogResult.Canceled) return; - var providerInstance = provider.CreateProvider(this.Logger); - var deleteSecretResponse = await this.RustService.DeleteAPIKey(providerInstance); + var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider); if(deleteSecretResponse.Success) { this.SettingsManager.ConfigurationData.Providers.Remove(provider); @@ -125,32 +122,8 @@ public partial class Settings : ComponentBase, IMessageBusReceiver, IDisposable this.UpdateProviders(); await this.MessageBus.SendMessage(this, Event.CONFIGURATION_CHANGED); } - - private bool HasDashboard(LLMProviders llmProvider) => llmProvider switch - { - LLMProviders.OPEN_AI => true, - LLMProviders.MISTRAL => true, - LLMProviders.ANTHROPIC => true, - LLMProviders.GROQ => true, - LLMProviders.FIREWORKS => true, - LLMProviders.GOOGLE => true, - - _ => false, - }; - - private string GetProviderDashboardURL(LLMProviders llmProvider) => llmProvider switch - { - LLMProviders.OPEN_AI => 
"https://platform.openai.com/usage", - LLMProviders.MISTRAL => "https://console.mistral.ai/usage/", - LLMProviders.ANTHROPIC => "https://console.anthropic.com/settings/plans", - LLMProviders.GROQ => "https://console.groq.com/settings/usage", - LLMProviders.GOOGLE => "https://console.cloud.google.com/billing", - LLMProviders.FIREWORKS => "https://fireworks.ai/account/billing", - - _ => string.Empty, - }; - private string GetProviderModelName(AIStudio.Settings.Provider provider) + private string GetLLMProviderModelName(AIStudio.Settings.Provider provider) { const int MAX_LENGTH = 36; var modelName = provider.Model.ToString(); @@ -159,9 +132,9 @@ public partial class Settings : ComponentBase, IMessageBusReceiver, IDisposable private void UpdateProviders() { - this.availableProviders.Clear(); + this.availableLLMProviders.Clear(); foreach (var provider in this.SettingsManager.ConfigurationData.Providers) - this.availableProviders.Add(new (provider.InstanceName, provider.Id)); + this.availableLLMProviders.Add(new (provider.InstanceName, provider.Id)); } private string GetCurrentConfidenceLevelName(LLMProviders llmProvider) @@ -188,6 +161,103 @@ public partial class Settings : ComponentBase, IMessageBusReceiver, IDisposable #endregion + #region Embedding provider related + + private string GetEmbeddingProviderModelName(EmbeddingProvider provider) + { + const int MAX_LENGTH = 36; + var modelName = provider.Model.ToString(); + return modelName.Length > MAX_LENGTH ? "[...] " + modelName[^Math.Min(MAX_LENGTH, modelName.Length)..] 
: modelName; + } + + private async Task AddEmbeddingProvider() + { + var dialogParameters = new DialogParameters + { + { x => x.IsEditing, false }, + }; + + var dialogReference = await this.DialogService.ShowAsync("Add Embedding Provider", dialogParameters, DialogOptions.FULLSCREEN); + var dialogResult = await dialogReference.Result; + if (dialogResult is null || dialogResult.Canceled) + return; + + var addedEmbedding = (EmbeddingProvider)dialogResult.Data!; + addedEmbedding = addedEmbedding with { Num = this.SettingsManager.ConfigurationData.NextEmbeddingNum++ }; + + this.SettingsManager.ConfigurationData.EmbeddingProviders.Add(addedEmbedding); + this.UpdateEmbeddingProviders(); + + await this.SettingsManager.StoreSettings(); + await this.MessageBus.SendMessage(this, Event.CONFIGURATION_CHANGED); + } + + private async Task EditEmbeddingProvider(EmbeddingProvider embeddingProvider) + { + var dialogParameters = new DialogParameters + { + { x => x.DataNum, embeddingProvider.Num }, + { x => x.DataId, embeddingProvider.Id }, + { x => x.DataName, embeddingProvider.Name }, + { x => x.DataLLMProvider, embeddingProvider.UsedLLMProvider }, + { x => x.DataModel, embeddingProvider.Model }, + { x => x.DataHostname, embeddingProvider.Hostname }, + { x => x.IsSelfHosted, embeddingProvider.IsSelfHosted }, + { x => x.IsEditing, true }, + { x => x.DataHost, embeddingProvider.Host }, + }; + + var dialogReference = await this.DialogService.ShowAsync("Edit Embedding Provider", dialogParameters, DialogOptions.FULLSCREEN); + var dialogResult = await dialogReference.Result; + if (dialogResult is null || dialogResult.Canceled) + return; + + var editedEmbeddingProvider = (EmbeddingProvider)dialogResult.Data!; + + // Set the provider number if it's not set. This is important for providers + // added before we started saving the provider number. 
+ if(editedEmbeddingProvider.Num == 0) + editedEmbeddingProvider = editedEmbeddingProvider with { Num = this.SettingsManager.ConfigurationData.NextEmbeddingNum++ }; + + this.SettingsManager.ConfigurationData.EmbeddingProviders[this.SettingsManager.ConfigurationData.EmbeddingProviders.IndexOf(embeddingProvider)] = editedEmbeddingProvider; + this.UpdateEmbeddingProviders(); + + await this.SettingsManager.StoreSettings(); + await this.MessageBus.SendMessage(this, Event.CONFIGURATION_CHANGED); + } + + private async Task DeleteEmbeddingProvider(EmbeddingProvider provider) + { + var dialogParameters = new DialogParameters + { + { "Message", $"Are you sure you want to delete the embedding provider '{provider.Name}'?" }, + }; + + var dialogReference = await this.DialogService.ShowAsync("Delete Embedding Provider", dialogParameters, DialogOptions.FULLSCREEN); + var dialogResult = await dialogReference.Result; + if (dialogResult is null || dialogResult.Canceled) + return; + + var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider); + if(deleteSecretResponse.Success) + { + this.SettingsManager.ConfigurationData.EmbeddingProviders.Remove(provider); + await this.SettingsManager.StoreSettings(); + } + + this.UpdateEmbeddingProviders(); + await this.MessageBus.SendMessage(this, Event.CONFIGURATION_CHANGED); + } + + private void UpdateEmbeddingProviders() + { + this.availableEmbeddingProviders.Clear(); + foreach (var provider in this.SettingsManager.ConfigurationData.EmbeddingProviders) + this.availableEmbeddingProviders.Add(new (provider.Name, provider.Id)); + } + + #endregion + #region Profile related private async Task AddProfile() diff --git a/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs b/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs index 8c7c9d7..dc9767d 100644 --- a/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs +++ b/app/MindWork AI Studio/Provider/Anthropic/ProviderAnthropic.cs @@ -7,7 +7,7 @@ using 
AIStudio.Provider.OpenAI; namespace AIStudio.Provider.Anthropic; -public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://api.anthropic.com/v1/", logger), IProvider +public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://api.anthropic.com/v1/", logger) { private static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new() { @@ -16,12 +16,12 @@ public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://ap #region Implementation of IProvider - public string Id => "Anthropic"; + public override string Id => LLMProviders.ANTHROPIC.ToName(); - public string InstanceName { get; set; } = "Anthropic"; + public override string InstanceName { get; set; } = "Anthropic"; /// - public async IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) { // Get the API key: var requestedSecret = await RUST_SERVICE.GetAPIKey(this); @@ -136,14 +136,14 @@ public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://ap #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously /// - public async IAsyncEnumerable StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) { yield break; } #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously /// - public Task> 
GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) { return Task.FromResult(new[] { @@ -162,13 +162,17 @@ public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://ap }.AsEnumerable()); } - #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously /// - public Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) + { + return Task.FromResult(Enumerable.Empty()); + } + + /// + public override Task> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default) { return Task.FromResult(Enumerable.Empty()); } - #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously #endregion } \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/BaseProvider.cs b/app/MindWork AI Studio/Provider/BaseProvider.cs index 7e66fc1..e52d0c2 100644 --- a/app/MindWork AI Studio/Provider/BaseProvider.cs +++ b/app/MindWork AI Studio/Provider/BaseProvider.cs @@ -1,3 +1,5 @@ +using AIStudio.Chat; + using RustService = AIStudio.Tools.RustService; namespace AIStudio.Provider; @@ -5,10 +7,10 @@ namespace AIStudio.Provider; /// /// The base class for all providers. /// -public abstract class BaseProvider +public abstract class BaseProvider : IProvider, ISecretId { /// - /// The HTTP client to use for all requests. + /// The HTTP client to use it for all requests. 
/// protected readonly HttpClient httpClient = new(); @@ -39,4 +41,37 @@ public abstract class BaseProvider // Set the base URL: this.httpClient.BaseAddress = new(url); } + + #region Handling of IProvider, which all providers must implement + + /// + public abstract string Id { get; } + + /// + public abstract string InstanceName { get; set; } + + /// + public abstract IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatThread, CancellationToken token = default); + + /// + public abstract IAsyncEnumerable StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, CancellationToken token = default); + + /// + public abstract Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default); + + /// + public abstract Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default); + + /// + public abstract Task> GetEmbeddingModels(string? 
apiKeyProvisional = null, CancellationToken token = default); + + #endregion + + #region Implementation of ISecretId + + public string SecretId => this.Id; + + public string SecretName => this.InstanceName; + + #endregion } \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs b/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs index a48582b..709aad1 100644 --- a/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs +++ b/app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs @@ -7,7 +7,7 @@ using AIStudio.Chat; namespace AIStudio.Provider.Fireworks; -public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.fireworks.ai/inference/v1/", logger), IProvider +public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.fireworks.ai/inference/v1/", logger) { private static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new() { @@ -17,13 +17,13 @@ public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.firew #region Implementation of IProvider /// - public string Id => "Fireworks.ai"; + public override string Id => LLMProviders.FIREWORKS.ToName(); /// - public string InstanceName { get; set; } = "Fireworks.ai"; + public override string InstanceName { get; set; } = "Fireworks.ai"; /// - public async IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) { // Get the API key: var requestedSecret = await RUST_SERVICE.GetAPIKey(this); @@ -138,20 +138,26 @@ public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.firew #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously /// - public async IAsyncEnumerable StreamImageCompletion(Model 
imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) { yield break; } #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously /// - public Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) { return Task.FromResult(Enumerable.Empty()); } /// - public Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) + { + return Task.FromResult(Enumerable.Empty()); + } + + /// + public override Task> GetEmbeddingModels(string? 
apiKeyProvisional = null, CancellationToken token = default) { return Task.FromResult(Enumerable.Empty()); } diff --git a/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs b/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs index 9e523dd..6ca6d92 100644 --- a/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs +++ b/app/MindWork AI Studio/Provider/Google/ProviderGoogle.cs @@ -8,7 +8,7 @@ using AIStudio.Provider.OpenAI; namespace AIStudio.Provider.Google; -public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativelanguage.googleapis.com/v1beta/", logger), IProvider +public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativelanguage.googleapis.com/v1beta/", logger) { private static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new() { @@ -18,13 +18,13 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela #region Implementation of IProvider /// - public string Id => "Google"; + public override string Id => LLMProviders.GOOGLE.ToName(); /// - public string InstanceName { get; set; } = "Google Gemini"; + public override string InstanceName { get; set; } = "Google Gemini"; /// - public async IAsyncEnumerable StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) { // Get the API key: var requestedSecret = await RUST_SERVICE.GetAPIKey(this); @@ -139,27 +139,44 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously /// - public async IAsyncEnumerable StreamImageCompletion(Provider.Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = 
default, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamImageCompletion(Provider.Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) { yield break; } #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously /// - public Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override async Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) { - return this.LoadModels(token, apiKeyProvisional); + var modelResponse = await this.LoadModels(token, apiKeyProvisional); + if(modelResponse == default) + return []; + + return modelResponse.Models.Where(model => + model.Name.StartsWith("models/gemini-", StringComparison.InvariantCultureIgnoreCase)) + .Select(n => new Provider.Model(n.Name.Replace("models/", string.Empty), n.DisplayName)); } /// - public Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) { return Task.FromResult(Enumerable.Empty()); } + public override async Task> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default) + { + var modelResponse = await this.LoadModels(token, apiKeyProvisional); + if(modelResponse == default) + return []; + + return modelResponse.Models.Where(model => + model.Name.StartsWith("models/text-embedding-", StringComparison.InvariantCultureIgnoreCase)) + .Select(n => new Provider.Model(n.Name.Replace("models/", string.Empty), n.DisplayName)); + } + #endregion - private async Task> LoadModels(CancellationToken token, string? apiKeyProvisional = null) + private async Task LoadModels(CancellationToken token, string? 
apiKeyProvisional = null) { var secretKey = apiKeyProvisional switch { @@ -170,19 +187,17 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela _ => null, } }; - + if (secretKey is null) - return []; + return default; var request = new HttpRequestMessage(HttpMethod.Get, $"models?key={secretKey}"); var response = await this.httpClient.SendAsync(request, token); if(!response.IsSuccessStatusCode) - return []; + return default; var modelResponse = await response.Content.ReadFromJsonAsync(token); - return modelResponse.Models.Where(model => - model.Name.StartsWith("models/gemini-", StringComparison.InvariantCultureIgnoreCase)) - .Select(n => new Provider.Model(n.Name.Replace("models/", string.Empty), n.DisplayName)); + return modelResponse; } } \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs b/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs index 1340a3a..477f9a0 100644 --- a/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs +++ b/app/MindWork AI Studio/Provider/Groq/ProviderGroq.cs @@ -8,7 +8,7 @@ using AIStudio.Provider.OpenAI; namespace AIStudio.Provider.Groq; -public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/openai/v1/", logger), IProvider +public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/openai/v1/", logger) { private static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new() { @@ -18,13 +18,13 @@ public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/o #region Implementation of IProvider /// - public string Id => "Groq"; + public override string Id => LLMProviders.GROQ.ToName(); /// - public string InstanceName { get; set; } = "Groq"; + public override string InstanceName { get; set; } = "Groq"; /// - public async IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) + public override async 
IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) { // Get the API key: var requestedSecret = await RUST_SERVICE.GetAPIKey(this); @@ -141,23 +141,29 @@ public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/o #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously /// - public async IAsyncEnumerable StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) { yield break; } #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously /// - public Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) { return this.LoadModels(token, apiKeyProvisional); } /// - public Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) { return Task.FromResult>(Array.Empty()); } + + /// + public override Task> GetEmbeddingModels(string? 
apiKeyProvisional = null, CancellationToken token = default) + { + return Task.FromResult(Enumerable.Empty()); + } #endregion diff --git a/app/MindWork AI Studio/Provider/IProvider.cs b/app/MindWork AI Studio/Provider/IProvider.cs index ef3214b..69f6620 100644 --- a/app/MindWork AI Studio/Provider/IProvider.cs +++ b/app/MindWork AI Studio/Provider/IProvider.cs @@ -16,7 +16,7 @@ public interface IProvider /// The provider's instance name. Useful for multiple instances of the same provider, /// e.g., to distinguish between different OpenAI API keys. /// - public string InstanceName { get; set; } + public string InstanceName { get; } /// /// Starts a chat completion stream. @@ -53,4 +53,12 @@ public interface IProvider /// The cancellation token. /// The list of image models. public Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default); + + /// + /// Load all possible embedding models that can be used with this provider. + /// + /// The provisional API key to use. Useful when the user is adding a new provider. When null, the stored API key is used. + /// The cancellation token. + /// The list of embedding models. + public Task> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default); } \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs index c2420cc..a380542 100644 --- a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs +++ b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs @@ -7,6 +7,8 @@ using AIStudio.Provider.OpenAI; using AIStudio.Provider.SelfHosted; using AIStudio.Settings; +using Host = AIStudio.Provider.SelfHosted.Host; + namespace AIStudio.Provider; public static class LLMProvidersExtensions @@ -89,7 +91,7 @@ public static class LLMProvidersExtensions // // Self-hosted providers are treated as a special case anyway. 
// - LLMProviders.SELF_HOSTED => false, + LLMProviders.SELF_HOSTED => true, _ => false, }; @@ -101,20 +103,36 @@ public static class LLMProvidersExtensions /// The logger to use. /// The provider instance. public static IProvider CreateProvider(this Settings.Provider providerSettings, ILogger logger) + { + return providerSettings.UsedLLMProvider.CreateProvider(providerSettings.InstanceName, providerSettings.Host, providerSettings.Hostname, logger); + } + + /// + /// Creates a new provider instance based on the embedding provider value. + /// + /// The embedding provider settings. + /// The logger to use. + /// The provider instance. + public static IProvider CreateProvider(this EmbeddingProvider embeddingProviderSettings, ILogger logger) + { + return embeddingProviderSettings.UsedLLMProvider.CreateProvider(embeddingProviderSettings.Name, embeddingProviderSettings.Host, embeddingProviderSettings.Hostname, logger); + } + + private static IProvider CreateProvider(this LLMProviders provider, string instanceName, Host host, string hostname, ILogger logger) { try { - return providerSettings.UsedLLMProvider switch + return provider switch { - LLMProviders.OPEN_AI => new ProviderOpenAI(logger) { InstanceName = providerSettings.InstanceName }, - LLMProviders.ANTHROPIC => new ProviderAnthropic(logger) { InstanceName = providerSettings.InstanceName }, - LLMProviders.MISTRAL => new ProviderMistral(logger) { InstanceName = providerSettings.InstanceName }, - LLMProviders.GOOGLE => new ProviderGoogle(logger) { InstanceName = providerSettings.InstanceName }, + LLMProviders.OPEN_AI => new ProviderOpenAI(logger) { InstanceName = instanceName }, + LLMProviders.ANTHROPIC => new ProviderAnthropic(logger) { InstanceName = instanceName }, + LLMProviders.MISTRAL => new ProviderMistral(logger) { InstanceName = instanceName }, + LLMProviders.GOOGLE => new ProviderGoogle(logger) { InstanceName = instanceName }, - LLMProviders.GROQ => new ProviderGroq(logger) { InstanceName = 
providerSettings.InstanceName }, - LLMProviders.FIREWORKS => new ProviderFireworks(logger) { InstanceName = providerSettings.InstanceName }, + LLMProviders.GROQ => new ProviderGroq(logger) { InstanceName = instanceName }, + LLMProviders.FIREWORKS => new ProviderFireworks(logger) { InstanceName = instanceName }, - LLMProviders.SELF_HOSTED => new ProviderSelfHosted(logger, providerSettings) { InstanceName = providerSettings.InstanceName }, + LLMProviders.SELF_HOSTED => new ProviderSelfHosted(logger, host, hostname) { InstanceName = instanceName }, _ => new NoProvider(), }; @@ -125,4 +143,125 @@ public static class LLMProvidersExtensions return new NoProvider(); } } + + public static string GetCreationURL(this LLMProviders provider) => provider switch + { + LLMProviders.OPEN_AI => "https://platform.openai.com/signup", + LLMProviders.MISTRAL => "https://console.mistral.ai/", + LLMProviders.ANTHROPIC => "https://console.anthropic.com/dashboard", + LLMProviders.GOOGLE => "https://console.cloud.google.com/", + + LLMProviders.GROQ => "https://console.groq.com/", + LLMProviders.FIREWORKS => "https://fireworks.ai/login", + + _ => string.Empty, + }; + + public static string GetDashboardURL(this LLMProviders provider) => provider switch + { + LLMProviders.OPEN_AI => "https://platform.openai.com/usage", + LLMProviders.MISTRAL => "https://console.mistral.ai/usage/", + LLMProviders.ANTHROPIC => "https://console.anthropic.com/settings/plans", + LLMProviders.GROQ => "https://console.groq.com/settings/usage", + LLMProviders.GOOGLE => "https://console.cloud.google.com/billing", + LLMProviders.FIREWORKS => "https://fireworks.ai/account/billing", + + _ => string.Empty, + }; + + public static bool HasDashboard(this LLMProviders provider) => provider switch + { + LLMProviders.OPEN_AI => true, + LLMProviders.MISTRAL => true, + LLMProviders.ANTHROPIC => true, + LLMProviders.GROQ => true, + LLMProviders.FIREWORKS => true, + LLMProviders.GOOGLE => true, + + _ => false, + }; + + public static 
string GetModelsOverviewURL(this LLMProviders provider) => provider switch + { + LLMProviders.FIREWORKS => "https://fireworks.ai/models?show=Serverless", + _ => string.Empty, + }; + + public static bool IsLLMModelProvidedManually(this LLMProviders provider) => provider switch + { + LLMProviders.FIREWORKS => true, + _ => false, + }; + + public static bool IsEmbeddingModelProvidedManually(this LLMProviders provider, Host host) => provider switch + { + LLMProviders.SELF_HOSTED => host is not Host.LM_STUDIO, + _ => false, + }; + + public static bool IsHostNeeded(this LLMProviders provider) => provider switch + { + LLMProviders.SELF_HOSTED => true, + _ => false, + }; + + public static bool IsHostnameNeeded(this LLMProviders provider) => provider switch + { + LLMProviders.SELF_HOSTED => true, + _ => false, + }; + + public static bool IsAPIKeyNeeded(this LLMProviders provider, Host host) => provider switch + { + LLMProviders.OPEN_AI => true, + LLMProviders.MISTRAL => true, + LLMProviders.ANTHROPIC => true, + LLMProviders.GOOGLE => true, + + LLMProviders.GROQ => true, + LLMProviders.FIREWORKS => true, + + LLMProviders.SELF_HOSTED => host is Host.OLLAMA, + + _ => false, + }; + + public static bool ShowRegisterButton(this LLMProviders provider) => provider switch + { + LLMProviders.OPEN_AI => true, + LLMProviders.MISTRAL => true, + LLMProviders.ANTHROPIC => true, + LLMProviders.GOOGLE => true, + + LLMProviders.GROQ => true, + LLMProviders.FIREWORKS => true, + + _ => false, + }; + + public static bool CanLoadModels(this LLMProviders provider, Host host, string? 
apiKey) + { + if (provider is LLMProviders.SELF_HOSTED) + { + switch (host) + { + case Host.NONE: + case Host.LLAMACPP: + default: + return false; + + case Host.OLLAMA: + case Host.LM_STUDIO: + return true; + } + } + + if(provider is LLMProviders.NONE) + return false; + + if(string.IsNullOrWhiteSpace(apiKey)) + return false; + + return true; + } } \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs b/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs index 23296669..633fa94 100644 --- a/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs +++ b/app/MindWork AI Studio/Provider/Mistral/ProviderMistral.cs @@ -8,7 +8,7 @@ using AIStudio.Provider.OpenAI; namespace AIStudio.Provider.Mistral; -public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api.mistral.ai/v1/", logger), IProvider +public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api.mistral.ai/v1/", logger) { private static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new() { @@ -17,12 +17,12 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api. #region Implementation of IProvider - public string Id => "Mistral"; + public override string Id => LLMProviders.MISTRAL.ToName(); - public string InstanceName { get; set; } = "Mistral"; + public override string InstanceName { get; set; } = "Mistral"; /// - public async IAsyncEnumerable StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) { // Get the API key: var requestedSecret = await RUST_SERVICE.GetAPIKey(this); @@ -140,14 +140,45 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api. 
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously /// - public async IAsyncEnumerable StreamImageCompletion(Provider.Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamImageCompletion(Provider.Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) { yield break; } #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously /// - public async Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override async Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) + { + var modelResponse = await this.LoadModelList(apiKeyProvisional, token); + if(modelResponse == default) + return []; + + return modelResponse.Data.Where(n => + !n.Id.StartsWith("code", StringComparison.InvariantCulture) && + !n.Id.Contains("embed", StringComparison.InvariantCulture)) + .Select(n => new Provider.Model(n.Id, null)); + } + + /// + public override async Task> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default) + { + var modelResponse = await this.LoadModelList(apiKeyProvisional, token); + if(modelResponse == default) + return []; + + return modelResponse.Data.Where(n => n.Id.Contains("embed", StringComparison.InvariantCulture)) + .Select(n => new Provider.Model(n.Id, null)); + } + + /// + public override Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) + { + return Task.FromResult(Enumerable.Empty()); + } + + #endregion + + private async Task LoadModelList(string? 
apiKeyProvisional, CancellationToken token) { var secretKey = apiKeyProvisional switch { @@ -160,29 +191,16 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api. }; if (secretKey is null) - return []; + return default; var request = new HttpRequestMessage(HttpMethod.Get, "models"); request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey); var response = await this.httpClient.SendAsync(request, token); if(!response.IsSuccessStatusCode) - return []; + return default; var modelResponse = await response.Content.ReadFromJsonAsync(token); - return modelResponse.Data.Where(n => - !n.Id.StartsWith("code", StringComparison.InvariantCulture) && - !n.Id.Contains("embed", StringComparison.InvariantCulture)) - .Select(n => new Provider.Model(n.Id, null)); + return modelResponse; } - - #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously - /// - public Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) - { - return Task.FromResult(Enumerable.Empty()); - } - #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously - - #endregion } \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/NoProvider.cs b/app/MindWork AI Studio/Provider/NoProvider.cs index f6a6079..6efcc38 100644 --- a/app/MindWork AI Studio/Provider/NoProvider.cs +++ b/app/MindWork AI Studio/Provider/NoProvider.cs @@ -15,6 +15,8 @@ public class NoProvider : IProvider public Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult>([]); public Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult>([]); + + public Task> GetEmbeddingModels(string? 
apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult>([]); public async IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatChatThread, [EnumeratorCancellation] CancellationToken token = default) { diff --git a/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs b/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs index 3ca5ac0..2f1c25a 100644 --- a/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs +++ b/app/MindWork AI Studio/Provider/OpenAI/ProviderOpenAI.cs @@ -10,7 +10,7 @@ namespace AIStudio.Provider.OpenAI; /// /// The OpenAI provider. /// -public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.openai.com/v1/", logger), IProvider +public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.openai.com/v1/", logger) { private static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new() { @@ -20,13 +20,13 @@ public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.o #region Implementation of IProvider /// - public string Id => "OpenAI"; + public override string Id => LLMProviders.OPEN_AI.ToName(); /// - public string InstanceName { get; set; } = "OpenAI"; + public override string InstanceName { get; set; } = "OpenAI"; /// - public async IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamChatCompletion(Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) { // Get the API key: var requestedSecret = await RUST_SERVICE.GetAPIKey(this); @@ -144,23 +144,29 @@ public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.o #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously /// - public async IAsyncEnumerable StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = 
FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) { yield break; } #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously /// - public Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) { return this.LoadModels(["gpt-", "o1-"], token, apiKeyProvisional); } /// - public Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) { return this.LoadModels(["dall-e-"], token, apiKeyProvisional); } + + /// + public override Task> GetEmbeddingModels(string? 
apiKeyProvisional = null, CancellationToken token = default) + { + return this.LoadModels(["text-embedding-"], token, apiKeyProvisional); + } #endregion diff --git a/app/MindWork AI Studio/Provider/SelfHosted/Host.cs b/app/MindWork AI Studio/Provider/SelfHosted/Host.cs index 0e3a26d..c90ecc9 100644 --- a/app/MindWork AI Studio/Provider/SelfHosted/Host.cs +++ b/app/MindWork AI Studio/Provider/SelfHosted/Host.cs @@ -7,36 +7,4 @@ public enum Host LM_STUDIO, LLAMACPP, OLLAMA, -} - -public static class HostExtensions -{ - public static string Name(this Host host) => host switch - { - Host.NONE => "None", - - Host.LM_STUDIO => "LM Studio", - Host.LLAMACPP => "llama.cpp", - Host.OLLAMA => "ollama", - - _ => "Unknown", - }; - - public static string BaseURL(this Host host) => host switch - { - Host.LM_STUDIO => "/v1/", - Host.LLAMACPP => "/v1/", - Host.OLLAMA => "/v1/", - - _ => "/v1/", - }; - - public static string ChatURL(this Host host) => host switch - { - Host.LM_STUDIO => "chat/completions", - Host.LLAMACPP => "chat/completions", - Host.OLLAMA => "chat/completions", - - _ => "chat/completions", - }; } \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/SelfHosted/HostExtensions.cs b/app/MindWork AI Studio/Provider/SelfHosted/HostExtensions.cs new file mode 100644 index 0000000..ada7920 --- /dev/null +++ b/app/MindWork AI Studio/Provider/SelfHosted/HostExtensions.cs @@ -0,0 +1,47 @@ +namespace AIStudio.Provider.SelfHosted; + +public static class HostExtensions +{ + public static string Name(this Host host) => host switch + { + Host.NONE => "None", + + Host.LM_STUDIO => "LM Studio", + Host.LLAMACPP => "llama.cpp", + Host.OLLAMA => "ollama", + + _ => "Unknown", + }; + + public static string BaseURL(this Host host) => host switch + { + Host.LM_STUDIO => "/v1/", + Host.LLAMACPP => "/v1/", + Host.OLLAMA => "/v1/", + + _ => "/v1/", + }; + + public static string ChatURL(this Host host) => host switch + { + Host.LM_STUDIO => "chat/completions", + 
Host.LLAMACPP => "chat/completions", + Host.OLLAMA => "chat/completions", + + _ => "chat/completions", + }; + + public static bool AreEmbeddingsSupported(this Host host) + { + switch (host) + { + case Host.LM_STUDIO: + case Host.OLLAMA: + return true; + + default: + case Host.LLAMACPP: + return false; + } + } +} \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs b/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs index f50b34c..46958e9 100644 --- a/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs +++ b/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs @@ -8,7 +8,7 @@ using AIStudio.Provider.OpenAI; namespace AIStudio.Provider.SelfHosted; -public sealed class ProviderSelfHosted(ILogger logger, Settings.Provider provider) : BaseProvider($"{provider.Hostname}{provider.Host.BaseURL()}", logger), IProvider +public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostname) : BaseProvider($"{hostname}{host.BaseURL()}", logger) { private static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new() { @@ -17,12 +17,12 @@ public sealed class ProviderSelfHosted(ILogger logger, Settings.Provider provide #region Implementation of IProvider - public string Id => "Self-hosted"; + public override string Id => LLMProviders.SELF_HOSTED.ToName(); - public string InstanceName { get; set; } = "Self-hosted"; + public override string InstanceName { get; set; } = "Self-hosted"; /// - public async IAsyncEnumerable StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default) { // Get the API key: var requestedSecret = await RUST_SERVICE.GetAPIKey(this, isTrying: true); @@ -70,7 +70,7 @@ public sealed class 
ProviderSelfHosted(ILogger logger, Settings.Provider provide try { // Build the HTTP post request: - var request = new HttpRequestMessage(HttpMethod.Post, provider.Host.ChatURL()); + var request = new HttpRequestMessage(HttpMethod.Post, host.ChatURL()); // Set the authorization header: if (requestedSecret.Success) @@ -148,18 +148,18 @@ public sealed class ProviderSelfHosted(ILogger logger, Settings.Provider provide #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously /// - public async IAsyncEnumerable StreamImageCompletion(Provider.Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) + public override async IAsyncEnumerable StreamImageCompletion(Provider.Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) { yield break; } #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously - public async Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override async Task> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) { try { - switch (provider.Host) + switch (host) { case Host.LLAMACPP: // Right now, llama.cpp only supports one model. 
@@ -168,27 +168,7 @@ public sealed class ProviderSelfHosted(ILogger logger, Settings.Provider provide case Host.LM_STUDIO: case Host.OLLAMA: - - var secretKey = apiKeyProvisional switch - { - not null => apiKeyProvisional, - _ => await RUST_SERVICE.GetAPIKey(this, isTrying: true) switch - { - { Success: true } result => await result.Secret.Decrypt(ENCRYPTION), - _ => null, - } - }; - - var lmStudioRequest = new HttpRequestMessage(HttpMethod.Get, "models"); - if(secretKey is not null) - lmStudioRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", apiKeyProvisional); - - var lmStudioResponse = await this.httpClient.SendAsync(lmStudioRequest, token); - if(!lmStudioResponse.IsSuccessStatusCode) - return []; - - var lmStudioModelResponse = await lmStudioResponse.Content.ReadFromJsonAsync(token); - return lmStudioModelResponse.Data.Select(n => new Provider.Model(n.Id, null)); + return await this.LoadModels(["embed"], [], token, apiKeyProvisional); } return []; @@ -200,13 +180,58 @@ public sealed class ProviderSelfHosted(ILogger logger, Settings.Provider provide } } - #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously /// - public Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) + public override Task> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default) { return Task.FromResult(Enumerable.Empty()); } - #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously + + public override async Task> GetEmbeddingModels(string? 
apiKeyProvisional = null, CancellationToken token = default) + { + try + { + switch (host) + { + case Host.LM_STUDIO: + case Host.OLLAMA: + return await this.LoadModels([], ["embed"], token, apiKeyProvisional); + } + + return []; + } + catch(Exception e) + { + this.logger.LogError($"Failed to load embedding models from self-hosted provider: {e.Message}"); + return []; + } + } #endregion + + private async Task> LoadModels(string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null) + { + var secretKey = apiKeyProvisional switch + { + not null => apiKeyProvisional, + _ => await RUST_SERVICE.GetAPIKey(this, isTrying: true) switch + { + { Success: true } result => await result.Secret.Decrypt(ENCRYPTION), + _ => null, + } + }; + + var lmStudioRequest = new HttpRequestMessage(HttpMethod.Get, "models"); + if(secretKey is not null) + lmStudioRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey); + + var lmStudioResponse = await this.httpClient.SendAsync(lmStudioRequest, token); + if(!lmStudioResponse.IsSuccessStatusCode) + return []; + + var lmStudioModelResponse = await lmStudioResponse.Content.ReadFromJsonAsync(token); + return lmStudioModelResponse.Data. 
+ Where(model => !ignorePhrases.Any(ignorePhrase => model.Id.Contains(ignorePhrase, StringComparison.InvariantCulture)) && + filterPhrases.All( filter => model.Id.Contains(filter, StringComparison.InvariantCulture))) + .Select(n => new Provider.Model(n.Id, null)); + } } \ No newline at end of file diff --git a/app/MindWork AI Studio/Settings/ConfigurationSelectData.cs b/app/MindWork AI Studio/Settings/ConfigurationSelectData.cs index faf6636..b5a6912 100644 --- a/app/MindWork AI Studio/Settings/ConfigurationSelectData.cs +++ b/app/MindWork AI Studio/Settings/ConfigurationSelectData.cs @@ -83,8 +83,9 @@ public static class ConfigurationSelectDataFactory yield return new("All preview features are hidden", PreviewVisibility.NONE); yield return new("Also show features ready for release; these should be stable", PreviewVisibility.RELEASE_CANDIDATE); yield return new("Also show features in beta: these are almost ready for release; expect some bugs", PreviewVisibility.BETA); - yield return new("Also show features in alpha: these are in early development; expect bugs and missing features", PreviewVisibility.ALPHA); + yield return new("Also show features in alpha: these are in development; expect bugs and missing features", PreviewVisibility.ALPHA); yield return new("Show also prototype features: these are works in progress; expect bugs and missing features", PreviewVisibility.PROTOTYPE); + yield return new("Show also experimental features: these are experimental; expect bugs, missing features, many changes", PreviewVisibility.EXPERIMENTAL); } public static IEnumerable> GetNavBehaviorData() diff --git a/app/MindWork AI Studio/Settings/DataModel/Data.cs b/app/MindWork AI Studio/Settings/DataModel/Data.cs index 59ceaae..e2b678e 100644 --- a/app/MindWork AI Studio/Settings/DataModel/Data.cs +++ b/app/MindWork AI Studio/Settings/DataModel/Data.cs @@ -20,6 +20,11 @@ public sealed class Data /// Settings concerning the LLM providers. 
/// public DataLLMProviders LLMProviders { get; init; } = new(); + + /// + /// A collection of embedding providers configured. + /// + public List EmbeddingProviders { get; init; } = []; /// /// List of configured profiles. @@ -31,6 +36,11 @@ public sealed class Data /// public uint NextProviderNum { get; set; } = 1; + /// + /// The next embedding number to use. + /// + public uint NextEmbeddingNum { get; set; } = 1; + /// /// The next profile number to use. /// diff --git a/app/MindWork AI Studio/Settings/DataModel/PreviewVisibility.cs b/app/MindWork AI Studio/Settings/DataModel/PreviewVisibility.cs index 67da294..7f5d5b9 100644 --- a/app/MindWork AI Studio/Settings/DataModel/PreviewVisibility.cs +++ b/app/MindWork AI Studio/Settings/DataModel/PreviewVisibility.cs @@ -8,4 +8,5 @@ public enum PreviewVisibility BETA, ALPHA, PROTOTYPE, + EXPERIMENTAL, } \ No newline at end of file diff --git a/app/MindWork AI Studio/Settings/EmbeddingProvider.cs b/app/MindWork AI Studio/Settings/EmbeddingProvider.cs new file mode 100644 index 0000000..126a0be --- /dev/null +++ b/app/MindWork AI Studio/Settings/EmbeddingProvider.cs @@ -0,0 +1,32 @@ +using System.Text.Json.Serialization; + +using AIStudio.Provider; + +using Host = AIStudio.Provider.SelfHosted.Host; + +namespace AIStudio.Settings; + +public readonly record struct EmbeddingProvider( + uint Num, + string Id, + string Name, + LLMProviders UsedLLMProvider, + Model Model, + bool IsSelfHosted = false, + string Hostname = "http://localhost:1234", + Host Host = Host.NONE) : ISecretId +{ + public override string ToString() => this.Name; + + #region Implementation of ISecretId + + /// + [JsonIgnore] + public string SecretId => this.Id; + + /// + [JsonIgnore] + public string SecretName => this.Name; + + #endregion +} \ No newline at end of file diff --git a/app/MindWork AI Studio/Settings/Provider.cs b/app/MindWork AI Studio/Settings/Provider.cs index 6279cf7..b349016 100644 --- a/app/MindWork AI Studio/Settings/Provider.cs +++ 
b/app/MindWork AI Studio/Settings/Provider.cs @@ -1,3 +1,5 @@ +using System.Text.Json.Serialization; + using AIStudio.Provider; using Host = AIStudio.Provider.SelfHosted.Host; @@ -22,7 +24,7 @@ public readonly record struct Provider( Model Model, bool IsSelfHosted = false, string Hostname = "http://localhost:1234", - Host Host = Host.NONE) + Host Host = Host.NONE) : ISecretId { #region Overrides of ValueType @@ -40,4 +42,16 @@ public readonly record struct Provider( } #endregion + + #region Implementation of ISecretId + + /// + [JsonIgnore] + public string SecretId => this.Id; + + /// + [JsonIgnore] + public string SecretName => this.InstanceName; + + #endregion } \ No newline at end of file diff --git a/app/MindWork AI Studio/Tools/ISecretId.cs b/app/MindWork AI Studio/Tools/ISecretId.cs new file mode 100644 index 0000000..c119891 --- /dev/null +++ b/app/MindWork AI Studio/Tools/ISecretId.cs @@ -0,0 +1,17 @@ +namespace AIStudio.Tools; + +/// +/// Represents an interface defining a secret identifier. +/// +public interface ISecretId +{ + /// + /// The unique ID of the secret. + /// + public string SecretId { get; } + + /// + /// The instance name of the secret. + /// + public string SecretName { get; } +} \ No newline at end of file diff --git a/app/MindWork AI Studio/Tools/RustService.cs b/app/MindWork AI Studio/Tools/RustService.cs index bf40e0d..a38cf5f 100644 --- a/app/MindWork AI Studio/Tools/RustService.cs +++ b/app/MindWork AI Studio/Tools/RustService.cs @@ -1,7 +1,6 @@ using System.Security.Cryptography; using System.Text.Json; -using AIStudio.Provider; using AIStudio.Tools.Rust; // ReSharper disable NotAccessedPositionalProperty.Local @@ -255,71 +254,71 @@ public sealed class RustService : IDisposable } /// - /// Try to get the API key for the given provider. + /// Try to get the API key for the given secret ID. /// - /// The provider to get the API key for. + /// The secret ID to get the API key for. /// Indicates if we are trying to get the API key. 
In that case, we don't log errors. /// The requested secret. - public async Task GetAPIKey(IProvider provider, bool isTrying = false) + public async Task GetAPIKey(ISecretId secretId, bool isTrying = false) { - var secretRequest = new SelectSecretRequest($"provider::{provider.Id}::{provider.InstanceName}::api_key", Environment.UserName, isTrying); + var secretRequest = new SelectSecretRequest($"provider::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, isTrying); var result = await this.http.PostAsJsonAsync("/secrets/get", secretRequest, this.jsonRustSerializerOptions); if (!result.IsSuccessStatusCode) { if(!isTrying) - this.logger!.LogError($"Failed to get the API key for provider '{provider.Id}' due to an API issue: '{result.StatusCode}'"); + this.logger!.LogError($"Failed to get the API key for secret ID '{secretId.SecretId}' due to an API issue: '{result.StatusCode}'"); return new RequestedSecret(false, new EncryptedText(string.Empty), "Failed to get the API key due to an API issue."); } var secret = await result.Content.ReadFromJsonAsync(this.jsonRustSerializerOptions); if (!secret.Success && !isTrying) - this.logger!.LogError($"Failed to get the API key for provider '{provider.Id}': '{secret.Issue}'"); + this.logger!.LogError($"Failed to get the API key for secret ID '{secretId.SecretId}': '{secret.Issue}'"); return secret; } /// - /// Try to store the API key for the given provider. + /// Try to store the API key for the given secret ID. /// - /// The provider to store the API key for. + /// The secret ID to store the API key for. /// The API key to store. /// The store secret response. 
- public async Task SetAPIKey(IProvider provider, string key) + public async Task SetAPIKey(ISecretId secretId, string key) { var encryptedKey = await this.encryptor!.Encrypt(key); - var request = new StoreSecretRequest($"provider::{provider.Id}::{provider.InstanceName}::api_key", Environment.UserName, encryptedKey); + var request = new StoreSecretRequest($"provider::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, encryptedKey); var result = await this.http.PostAsJsonAsync("/secrets/store", request, this.jsonRustSerializerOptions); if (!result.IsSuccessStatusCode) { - this.logger!.LogError($"Failed to store the API key for provider '{provider.Id}' due to an API issue: '{result.StatusCode}'"); + this.logger!.LogError($"Failed to store the API key for secret ID '{secretId.SecretId}' due to an API issue: '{result.StatusCode}'"); return new StoreSecretResponse(false, "Failed to get the API key due to an API issue."); } var state = await result.Content.ReadFromJsonAsync(this.jsonRustSerializerOptions); if (!state.Success) - this.logger!.LogError($"Failed to store the API key for provider '{provider.Id}': '{state.Issue}'"); + this.logger!.LogError($"Failed to store the API key for secret ID '{secretId.SecretId}': '{state.Issue}'"); return state; } /// - /// Tries to delete the API key for the given provider. + /// Tries to delete the API key for the given secret ID. /// - /// The provider to delete the API key for. + /// The secret ID to delete the API key for. /// The delete secret response. 
- public async Task DeleteAPIKey(IProvider provider) + public async Task DeleteAPIKey(ISecretId secretId) { - var request = new SelectSecretRequest($"provider::{provider.Id}::{provider.InstanceName}::api_key", Environment.UserName, false); + var request = new SelectSecretRequest($"provider::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, false); var result = await this.http.PostAsJsonAsync("/secrets/delete", request, this.jsonRustSerializerOptions); if (!result.IsSuccessStatusCode) { - this.logger!.LogError($"Failed to delete the API key for provider '{provider.Id}' due to an API issue: '{result.StatusCode}'"); + this.logger!.LogError($"Failed to delete the API key for secret ID '{secretId.SecretId}' due to an API issue: '{result.StatusCode}'"); return new DeleteSecretResponse{Success = false, WasEntryFound = false, Issue = "Failed to delete the API key due to an API issue."}; } var state = await result.Content.ReadFromJsonAsync(this.jsonRustSerializerOptions); if (!state.Success) - this.logger!.LogError($"Failed to delete the API key for provider '{provider.Id}': '{state.Issue}'"); + this.logger!.LogError($"Failed to delete the API key for secret ID '{secretId.SecretId}': '{state.Issue}'"); return state; } diff --git a/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs b/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs new file mode 100644 index 0000000..be7b16b --- /dev/null +++ b/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs @@ -0,0 +1,96 @@ +using AIStudio.Provider; + +using Host = AIStudio.Provider.SelfHosted.Host; + +namespace AIStudio.Tools.Validation; + +public sealed class ProviderValidation +{ + public Func GetProvider { get; init; } = () => LLMProviders.NONE; + + public Func GetAPIKeyStorageIssue { get; init; } = () => string.Empty; + + public Func GetPreviousInstanceName { get; init; } = () => string.Empty; + + public Func> GetUsedInstanceNames { get; init; } = () => []; + + public Func 
GetHost { get; init; } = () => Host.NONE; + + public string? ValidatingHostname(string hostname) + { + if(this.GetProvider() != LLMProviders.SELF_HOSTED) + return null; + + if(string.IsNullOrWhiteSpace(hostname)) + return "Please enter a hostname, e.g., http://localhost:1234"; + + if(!hostname.StartsWith("http://", StringComparison.InvariantCultureIgnoreCase) && !hostname.StartsWith("https://", StringComparison.InvariantCultureIgnoreCase)) + return "The hostname must start with either http:// or https://"; + + if(!Uri.TryCreate(hostname, UriKind.Absolute, out _)) + return "The hostname is not a valid HTTP(S) URL."; + + return null; + } + + public string? ValidatingAPIKey(string apiKey) + { + if(this.GetProvider() is LLMProviders.SELF_HOSTED) + return null; + + var apiKeyStorageIssue = this.GetAPIKeyStorageIssue(); + if(!string.IsNullOrWhiteSpace(apiKeyStorageIssue)) + return apiKeyStorageIssue; + + if(string.IsNullOrWhiteSpace(apiKey)) + return "Please enter an API key."; + + return null; + } + + public string? ValidatingInstanceName(string instanceName) + { + if (string.IsNullOrWhiteSpace(instanceName)) + return "Please enter an instance name."; + + if (instanceName.Length > 40) + return "The instance name must not exceed 40 characters."; + + // The instance name must be unique: + var lowerInstanceName = instanceName.ToLowerInvariant(); + if (lowerInstanceName != this.GetPreviousInstanceName() && this.GetUsedInstanceNames().Contains(lowerInstanceName)) + return "The instance name must be unique; the chosen name is already in use."; + + return null; + } + + public string? ValidatingModel(Model model) + { + if(this.GetProvider() is LLMProviders.SELF_HOSTED && this.GetHost() == Host.LLAMACPP) + return null; + + if (model == default) + return "Please select a model."; + + return null; + } + + public string? 
ValidatingProvider(LLMProviders llmProvider) + { + if (llmProvider == LLMProviders.NONE) + return "Please select a provider."; + + return null; + } + + public string? ValidatingHost(Host host) + { + if(this.GetProvider() is not LLMProviders.SELF_HOSTED) + return null; + + if (host == Host.NONE) + return "Please select a host."; + + return null; + } +} \ No newline at end of file diff --git a/app/MindWork AI Studio/wwwroot/changelog/v0.9.22.md b/app/MindWork AI Studio/wwwroot/changelog/v0.9.22.md index df1cdfe..cb4c863 100644 --- a/app/MindWork AI Studio/wwwroot/changelog/v0.9.22.md +++ b/app/MindWork AI Studio/wwwroot/changelog/v0.9.22.md @@ -1,2 +1,4 @@ # v0.9.22, build 197 (2024-1x-xx xx:xx UTC) -- Added the possibility to configure preview feature visibility in the app settings. This is useful for users who want to test new features before they are officially released. \ No newline at end of file +- Added the possibility to configure preview feature visibility in the app settings. This is useful for users who want to test new features before they are officially released. +- Added the possibility to configure embedding providers in the app settings. Embeddings are necessary in order to integrate local data and files. +- Improved self-hosted LLM provider configuration by filtering embedding models. \ No newline at end of file