Renamed LLAMA_CPP enum value
parent cca25d33b7
commit 21ceb70fd7
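This commit renames the Host enum member LLAMACPP to LLAMA_CPP and updates every reference to it, keeping the naming consistent with the existing LM_STUDIO member. Below is a minimal sketch of the resulting enum and its display-name mapping, assembled from the hunks that follow; the extension method name Name() and the default switch arm are assumptions, since the diff does not show them.

public enum Host
{
    NONE,

    LM_STUDIO,
    LLAMA_CPP,   // renamed from LLAMACPP in this commit
    OLLAMA,
    VLLM,
}

public static class HostExtensions
{
    // Hypothetical method name; the diff only shows the switch arms.
    public static string Name(this Host host) => host switch
    {
        Host.NONE => "None",

        Host.LM_STUDIO => "LM Studio",
        Host.LLAMA_CPP => "llama.cpp",
        Host.OLLAMA => "ollama",
        Host.VLLM => "vLLM",

        // Default arm assumed; not shown in the diff.
        _ => "Unknown host",
    };
}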
@@ -34,7 +34,7 @@
     {
         @GetLLMProviderModelName(context)
     }
-    else if (context.UsedLLMProvider is LLMProviders.SELF_HOSTED && context.Host is not Host.LLAMACPP)
+    else if (context.UsedLLMProvider is LLMProviders.SELF_HOSTED && context.Host is not Host.LLAMA_CPP)
     {
         @GetLLMProviderModelName(context)
     }
@@ -338,7 +338,7 @@ public static class LLMProvidersExtensions
         switch (host)
         {
             case Host.NONE:
-            case Host.LLAMACPP:
+            case Host.LLAMA_CPP:
             default:
                 return false;

@@ -5,7 +5,7 @@ public enum Host
     NONE,

     LM_STUDIO,
-    LLAMACPP,
+    LLAMA_CPP,
     OLLAMA,
     VLLM,
 }
@@ -7,7 +7,7 @@ public static class HostExtensions
         Host.NONE => "None",

         Host.LM_STUDIO => "LM Studio",
-        Host.LLAMACPP => "llama.cpp",
+        Host.LLAMA_CPP => "llama.cpp",
         Host.OLLAMA => "ollama",
         Host.VLLM => "vLLM",

@@ -34,7 +34,7 @@ public static class HostExtensions
                 return true;

             default:
-            case Host.LLAMACPP:
+            case Host.LLAMA_CPP:
                 return false;
         }
     }
@@ -91,7 +91,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
         {
             switch (host)
             {
-                case Host.LLAMACPP:
+                case Host.LLAMA_CPP:
                     // Right now, llama.cpp only supports one model.
                     // There is no API to list the model(s).
                     return [ new Provider.Model("as configured by llama.cpp", null) ];
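The comments in the hunk above explain the special case: llama.cpp serves exactly one model and exposes no model-listing endpoint, so the provider returns a single placeholder entry instead of querying the host. The following is a minimal sketch of that branch under stated assumptions: the method name LoadModels, its signature, and the Provider.Model stub are illustrative only and are not the repository's actual code.

using System.Collections.Generic;

// Stub so the sketch compiles on its own; the real type lives in the repository.
public static class Provider
{
    public sealed record Model(string Id, string? DisplayName);
}

public static class SelfHostedModelsSketch
{
    // Hypothetical helper; Host is the enum shown in the diff above.
    public static IEnumerable<Provider.Model> LoadModels(Host host)
    {
        switch (host)
        {
            case Host.LLAMA_CPP:
                // llama.cpp serves a single model and has no model-listing API,
                // so a placeholder entry stands in for whatever the server was started with.
                return [ new Provider.Model("as configured by llama.cpp", null) ];

            default:
                // Other self-hosted backends (LM Studio, ollama, vLLM) would query
                // their model-listing endpoint here (omitted in this sketch).
                return [];
        }
    }
}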
@@ -70,7 +70,7 @@ public sealed class ProviderValidation

     public string? ValidatingModel(Model model)
     {
-        if(this.GetProvider() is LLMProviders.SELF_HOSTED && this.GetHost() == Host.LLAMACPP)
+        if(this.GetProvider() is LLMProviders.SELF_HOSTED && this.GetHost() == Host.LLAMA_CPP)
             return null;

         if (model == default)