diff --git a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor
index 64990f2a..64c3981a 100644
--- a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor
+++ b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor
@@ -34,7 +34,7 @@
                     {
                         @GetLLMProviderModelName(context)
                     }
-                    else if (context.UsedLLMProvider is LLMProviders.SELF_HOSTED && context.Host is not Host.LLAMACPP)
+                    else if (context.UsedLLMProvider is LLMProviders.SELF_HOSTED && context.Host is not Host.LLAMA_CPP)
                     {
                         @GetLLMProviderModelName(context)
                     }
diff --git a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
index 91dfba8e..5556aa92 100644
--- a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
+++ b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
@@ -338,7 +338,7 @@ public static class LLMProvidersExtensions
         switch (host)
         {
             case Host.NONE:
-            case Host.LLAMACPP:
+            case Host.LLAMA_CPP:
             default:
                 return false;
 
diff --git a/app/MindWork AI Studio/Provider/SelfHosted/Host.cs b/app/MindWork AI Studio/Provider/SelfHosted/Host.cs
index d922ccd5..ddb5738f 100644
--- a/app/MindWork AI Studio/Provider/SelfHosted/Host.cs
+++ b/app/MindWork AI Studio/Provider/SelfHosted/Host.cs
@@ -5,7 +5,7 @@ public enum Host
     NONE,
 
     LM_STUDIO,
-    LLAMACPP,
+    LLAMA_CPP,
     OLLAMA,
     VLLM,
 }
\ No newline at end of file
diff --git a/app/MindWork AI Studio/Provider/SelfHosted/HostExtensions.cs b/app/MindWork AI Studio/Provider/SelfHosted/HostExtensions.cs
index 3478d9e5..a0f56ab3 100644
--- a/app/MindWork AI Studio/Provider/SelfHosted/HostExtensions.cs
+++ b/app/MindWork AI Studio/Provider/SelfHosted/HostExtensions.cs
@@ -7,7 +7,7 @@ public static class HostExtensions
         Host.NONE => "None",
 
         Host.LM_STUDIO => "LM Studio",
-        Host.LLAMACPP => "llama.cpp",
+        Host.LLAMA_CPP => "llama.cpp",
         Host.OLLAMA => "ollama",
         Host.VLLM => "vLLM",
@@ -34,7 +34,7 @@ public static class HostExtensions
                 return true;
 
             default:
-            case Host.LLAMACPP:
+            case Host.LLAMA_CPP:
                 return false;
         }
     }
diff --git a/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs b/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs
index 70228589..6f43d9ce 100644
--- a/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs
+++ b/app/MindWork AI Studio/Provider/SelfHosted/ProviderSelfHosted.cs
@@ -91,7 +91,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
         {
             switch (host)
             {
-                case Host.LLAMACPP:
+                case Host.LLAMA_CPP:
                     // Right now, llama.cpp only supports one model.
                     // There is no API to list the model(s).
                     return [ new Provider.Model("as configured by llama.cpp", null) ];
diff --git a/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs b/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs
index aa6217b0..3b074ba9 100644
--- a/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs
+++ b/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs
@@ -70,7 +70,7 @@ public sealed class ProviderValidation
 
     public string? ValidatingModel(Model model)
     {
-        if(this.GetProvider() is LLMProviders.SELF_HOSTED && this.GetHost() == Host.LLAMACPP)
+        if(this.GetProvider() is LLMProviders.SELF_HOSTED && this.GetHost() == Host.LLAMA_CPP)
             return null;
 
         if (model == default)