Mirror of https://github.com/MindWorkAI/AI-Studio.git
Rename HFInstanceProvider to HFInferenceProvider

Commit 37122fe7f7 (parent bc0e9f58f2)
@@ -65,7 +65,7 @@ public partial class SettingsPanelProviders : SettingsPanelBase
             { x => x.IsSelfHosted, provider.IsSelfHosted },
             { x => x.IsEditing, true },
             { x => x.DataHost, provider.Host },
-            { x => x.HfInstanceProviderId, provider.HFInstanceProvider },
+            { x => x.HFInferenceProviderId, provider.HFInferenceProvider },
         };

         var dialogReference = await this.DialogService.ShowAsync<ProviderDialog>("Edit LLM Provider", dialogParameters, DialogOptions.FULLSCREEN);
@@ -61,20 +61,22 @@

 @if (this.DataLLMProvider.IsHFInstanceProviderNeeded())
 {
-    <MudSelect Disabled="@(!this.DataLLMProvider.IsHFInstanceProviderNeeded())" @bind-Value="@this.HfInstanceProviderId" Label="HF Instance Provider" Class="mb-3" OpenIcon="@Icons.Material.Filled.Dns" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingHFInstanceProvider">
-        @foreach (HFInstanceProvider instanceProvider in Enum.GetValues(typeof(HFInstanceProvider)))
+    <MudSelect Disabled="@(!this.DataLLMProvider.IsHFInstanceProviderNeeded())" @bind-Value="@this.HFInferenceProviderId" Label="HF Inference Provider" Class="mb-3" OpenIcon="@Icons.Material.Filled.Dns" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingHFInstanceProvider">
+        @foreach (HFInferenceProvider inferenceProvider in Enum.GetValues(typeof(HFInferenceProvider)))
         {
-            <MudSelectItem Value="@instanceProvider">@instanceProvider.ToName()</MudSelectItem>
+            <MudSelectItem Value="@inferenceProvider">
+                @inferenceProvider.ToName()
+            </MudSelectItem>
         }
     </MudSelect>

-    <MudJustifiedText Class="mb-3"> Please double-check if your model name matches the curl specifications provided by the instance provider. If it doesn't, you might get a <b>Not Found</b> error when trying to use the model. Here's a <MudLink Href="https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct?inference_api=true&inference_provider=novita&language=sh" Target="_blank">curl example</MudLink>.</MudJustifiedText>
+    <MudJustifiedText Class="mb-3"> Please double-check if your model name matches the curl specifications provided by the inference provider. If it doesn't, you might get a <b>Not Found</b> error when trying to use the model. Here's a <MudLink Href="https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct?inference_api=true&inference_provider=novita&language=sh" Target="_blank">curl example</MudLink>.</MudJustifiedText>
 }

 <MudStack Row="@true" AlignItems="AlignItems.Center">
     @if (this.DataLLMProvider.IsLLMModelProvidedManually())
     {
-        <MudButton Variant="Variant.Filled" Size="Size.Small" StartIcon="@Icons.Material.Filled.OpenInBrowser" Href="@this.DataLLMProvider.GetModelsOverviewURL(this.HfInstanceProviderId)" Target="_blank">Show available models</MudButton>
+        <MudButton Variant="Variant.Filled" Size="Size.Small" StartIcon="@Icons.Material.Filled.OpenInBrowser" Href="@this.DataLLMProvider.GetModelsOverviewURL(this.HFInferenceProviderId)" Target="_blank">Show available models</MudButton>
         <MudTextField
             T="string"
             @bind-Text="@this.dataManuallyModel"
@@ -52,7 +52,7 @@ public partial class ProviderDialog : ComponentBase, ISecretId
     /// The HFInstanceProvider to use, e.g., CEREBRAS.
     /// </summary>
     [Parameter]
-    public HFInstanceProvider HfInstanceProviderId { get; set; } = HFInstanceProvider.NONE;
+    public HFInferenceProvider HFInferenceProviderId { get; set; } = HFInferenceProvider.NONE;

     /// <summary>
     /// Is this provider self-hosted?
@@ -138,7 +138,7 @@ public partial class ProviderDialog : ComponentBase, ISecretId
             IsSelfHosted = this.DataLLMProvider is LLMProviders.SELF_HOSTED,
             Hostname = cleanedHostname.EndsWith('/') ? cleanedHostname[..^1] : cleanedHostname,
             Host = this.DataHost,
-            HFInstanceProvider = this.HfInstanceProviderId,
+            HFInferenceProvider = this.HFInferenceProviderId,
         };
     }

@@ -1,9 +1,9 @@
 namespace AIStudio.Provider.HuggingFace;

 /// <summary>
-/// Enum for instance providers that Hugging Face supports.
+/// Enum for inference providers that Hugging Face supports.
 /// </summary>
-public enum HFInstanceProvider
+public enum HFInferenceProvider
 {
     NONE,

@@ -0,0 +1,43 @@
+namespace AIStudio.Provider.HuggingFace;
+
+public static class HFInferenceProviderExtensions
+{
+    public static string Endpoints(this HFInferenceProvider provider, Model model) => provider switch
+    {
+        HFInferenceProvider.CEREBRAS => "cerebras/v1/",
+        HFInferenceProvider.NEBIUS_AI_STUDIO => "nebius/v1/",
+        HFInferenceProvider.SAMBANOVA => "sambanova/v1/",
+        HFInferenceProvider.NOVITA => "novita/v3/openai/",
+        HFInferenceProvider.HYPERBOLIC => "hyperbolic/v1/",
+        HFInferenceProvider.TOGETHER_AI => "together/v1/",
+        HFInferenceProvider.FIREWORKS => "fireworks-ai/inference/v1/",
+        HFInferenceProvider.HF_INFERENCE_API => $"hf-inference/models/{model.ToString()}/v1/",
+        _ => string.Empty,
+    };
+
+    public static string EndpointsId(this HFInferenceProvider provider) => provider switch
+    {
+        HFInferenceProvider.CEREBRAS => "cerebras",
+        HFInferenceProvider.NEBIUS_AI_STUDIO => "nebius",
+        HFInferenceProvider.SAMBANOVA => "sambanova",
+        HFInferenceProvider.NOVITA => "novita",
+        HFInferenceProvider.HYPERBOLIC => "hyperbolic",
+        HFInferenceProvider.TOGETHER_AI => "together",
+        HFInferenceProvider.FIREWORKS => "fireworks",
+        HFInferenceProvider.HF_INFERENCE_API => "hf-inference",
+        _ => string.Empty,
+    };
+
+    public static string ToName(this HFInferenceProvider provider) => provider switch
+    {
+        HFInferenceProvider.CEREBRAS => "Cerebras",
+        HFInferenceProvider.NEBIUS_AI_STUDIO => "Nebius AI Studio",
+        HFInferenceProvider.SAMBANOVA => "Sambanova",
+        HFInferenceProvider.NOVITA => "Novita",
+        HFInferenceProvider.HYPERBOLIC => "Hyperbolic",
+        HFInferenceProvider.TOGETHER_AI => "Together AI",
+        HFInferenceProvider.FIREWORKS => "Fireworks AI",
+        HFInferenceProvider.HF_INFERENCE_API => "Hugging Face Inference API",
+        _ => string.Empty,
+    };
+}
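For reference, here is a minimal usage sketch of the new extension methods. It is not part of the commit; it assumes only the HFInferenceProvider enum and the HFInferenceProviderExtensions class added above are compiled into the project, and it lists each inference provider together with its display name (as shown in the MudSelect) and the endpoint id that is later interpolated into the Hugging Face model-overview URL.

// Sketch only, not part of this commit.
using System;
using AIStudio.Provider.HuggingFace;

foreach (HFInferenceProvider provider in Enum.GetValues<HFInferenceProvider>())
{
    // Skip the sentinel value that means "no provider selected yet".
    if (provider is HFInferenceProvider.NONE)
        continue;

    // ToName() is the display name; EndpointsId() is the value used as
    // the inference_provider query parameter on huggingface.co/models.
    Console.WriteLine($"{provider.ToName()} -> inference_provider id '{provider.EndpointsId()}'");
}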
@@ -1,43 +0,0 @@
-namespace AIStudio.Provider.HuggingFace;
-
-public static class HFInstanceProviderExtensions
-{
-    public static string Endpoints(this HFInstanceProvider provider, Model model) => provider switch
-    {
-        HFInstanceProvider.CEREBRAS => "cerebras/v1/",
-        HFInstanceProvider.NEBIUS_AI_STUDIO => "nebius/v1/",
-        HFInstanceProvider.SAMBANOVA => "sambanova/v1/",
-        HFInstanceProvider.NOVITA => "novita/v3/openai/",
-        HFInstanceProvider.HYPERBOLIC => "hyperbolic/v1/",
-        HFInstanceProvider.TOGETHER_AI => "together/v1/",
-        HFInstanceProvider.FIREWORKS => "fireworks-ai/inference/v1/",
-        HFInstanceProvider.HF_INFERENCE_API => $"hf-inference/models/{model.ToString()}/v1/",
-        _ => string.Empty,
-    };
-
-    public static string EndpointsId(this HFInstanceProvider provider) => provider switch
-    {
-        HFInstanceProvider.CEREBRAS => "cerebras",
-        HFInstanceProvider.NEBIUS_AI_STUDIO => "nebius",
-        HFInstanceProvider.SAMBANOVA => "sambanova",
-        HFInstanceProvider.NOVITA => "novita",
-        HFInstanceProvider.HYPERBOLIC => "hyperbolic",
-        HFInstanceProvider.TOGETHER_AI => "together",
-        HFInstanceProvider.FIREWORKS => "fireworks",
-        HFInstanceProvider.HF_INFERENCE_API => "hf-inference",
-        _ => string.Empty,
-    };
-
-    public static string ToName(this HFInstanceProvider provider) => provider switch
-    {
-        HFInstanceProvider.CEREBRAS => "Cerebras",
-        HFInstanceProvider.NEBIUS_AI_STUDIO => "Nebius AI Studio",
-        HFInstanceProvider.SAMBANOVA => "Sambanova",
-        HFInstanceProvider.NOVITA => "Novita",
-        HFInstanceProvider.HYPERBOLIC => "Hyperbolic",
-        HFInstanceProvider.TOGETHER_AI => "Together AI",
-        HFInstanceProvider.FIREWORKS => "Fireworks AI",
-        HFInstanceProvider.HF_INFERENCE_API => "Hugging Face Inference API",
-        _ => string.Empty,
-    };
-}
@@ -11,9 +11,9 @@ namespace AIStudio.Provider.HuggingFace;

 public sealed class ProviderHuggingFace : BaseProvider
 {
-    public ProviderHuggingFace(ILogger logger, HFInstanceProvider hfProvider, Model model) : base($"https://router.huggingface.co/{hfProvider.Endpoints(model)}", logger)
+    public ProviderHuggingFace(ILogger logger, HFInferenceProvider hfProvider, Model model) : base($"https://router.huggingface.co/{hfProvider.Endpoints(model)}", logger)
     {
-        logger.LogInformation($"We use the instance provider '{hfProvider}'. Thus we use the base URL 'https://router.huggingface.co/{hfProvider.Endpoints(model)}'.");
+        logger.LogInformation($"We use the inference provider '{hfProvider}'. Thus we use the base URL 'https://router.huggingface.co/{hfProvider.Endpoints(model)}'.");
     }

     #region Implementation of IProvider
@@ -135,7 +135,7 @@ public static class LLMProvidersExtensions
     /// <returns>The provider instance.</returns>
     public static IProvider CreateProvider(this AIStudio.Settings.Provider providerSettings, ILogger logger)
     {
-        return providerSettings.UsedLLMProvider.CreateProvider(providerSettings.InstanceName, providerSettings.Host, providerSettings.Hostname, providerSettings.Model, providerSettings.HFInstanceProvider ,logger);
+        return providerSettings.UsedLLMProvider.CreateProvider(providerSettings.InstanceName, providerSettings.Host, providerSettings.Hostname, providerSettings.Model, providerSettings.HFInferenceProvider ,logger);
     }

     /// <summary>
@@ -146,10 +146,10 @@ public static class LLMProvidersExtensions
     /// <returns>The provider instance.</returns>
     public static IProvider CreateProvider(this EmbeddingProvider embeddingProviderSettings, ILogger logger)
     {
-        return embeddingProviderSettings.UsedLLMProvider.CreateProvider(embeddingProviderSettings.Name, embeddingProviderSettings.Host, embeddingProviderSettings.Hostname, embeddingProviderSettings.Model, HFInstanceProvider.NONE,logger);
+        return embeddingProviderSettings.UsedLLMProvider.CreateProvider(embeddingProviderSettings.Name, embeddingProviderSettings.Host, embeddingProviderSettings.Hostname, embeddingProviderSettings.Model, HFInferenceProvider.NONE,logger);
     }

-    private static IProvider CreateProvider(this LLMProviders provider, string instanceName, Host host, string hostname, Model model, HFInstanceProvider instanceProvider , ILogger logger)
+    private static IProvider CreateProvider(this LLMProviders provider, string instanceName, Host host, string hostname, Model model, HFInferenceProvider inferenceProvider , ILogger logger)
     {
         try
         {
@@ -165,7 +165,7 @@ public static class LLMProvidersExtensions

                 LLMProviders.GROQ => new ProviderGroq(logger) { InstanceName = instanceName },
                 LLMProviders.FIREWORKS => new ProviderFireworks(logger) { InstanceName = instanceName },
-                LLMProviders.HUGGINGFACE => new ProviderHuggingFace(logger, instanceProvider, model) { InstanceName = instanceName },
+                LLMProviders.HUGGINGFACE => new ProviderHuggingFace(logger, inferenceProvider, model) { InstanceName = instanceName },

                 LLMProviders.SELF_HOSTED => new ProviderSelfHosted(logger, host, hostname) { InstanceName = instanceName },

@@ -234,10 +234,10 @@ public static class LLMProvidersExtensions
         _ => false,
     };

-    public static string GetModelsOverviewURL(this LLMProviders provider, HFInstanceProvider instanceProvider) => provider switch
+    public static string GetModelsOverviewURL(this LLMProviders provider, HFInferenceProvider inferenceProvider) => provider switch
     {
         LLMProviders.FIREWORKS => "https://fireworks.ai/models?show=Serverless",
-        LLMProviders.HUGGINGFACE => $"https://huggingface.co/models?inference_provider={instanceProvider.EndpointsId()}",
+        LLMProviders.HUGGINGFACE => $"https://huggingface.co/models?inference_provider={inferenceProvider.EndpointsId()}",
         _ => string.Empty,
     };

@@ -25,7 +25,7 @@ public readonly record struct Provider(
     bool IsSelfHosted = false,
     string Hostname = "http://localhost:1234",
     Host Host = Host.NONE,
-    HFInstanceProvider HFInstanceProvider = HFInstanceProvider.NONE) : ISecretId
+    HFInferenceProvider HFInferenceProvider = HFInferenceProvider.NONE) : ISecretId
 {
     #region Overrides of ValueType

@@ -94,13 +94,13 @@ public sealed class ProviderValidation
         return null;
     }

-    public string? ValidatingHFInstanceProvider(HFInstanceProvider instanceProvider)
+    public string? ValidatingHFInstanceProvider(HFInferenceProvider inferenceProvider)
     {
         if(this.GetProvider() is not LLMProviders.HUGGINGFACE)
             return null;

-        if (instanceProvider is HFInstanceProvider.NONE)
-            return "Please select an Hugging Face instance provider.";
+        if (inferenceProvider is HFInferenceProvider.NONE)
+            return "Please select a Hugging Face inference provider.";

         return null;
     }
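Downstream of this rename, the inference provider chosen in the dialog determines the Hugging Face router base URL used for requests. Below is a minimal sketch (not part of the commit) of that composition, mirroring the ProviderHuggingFace constructor and GetModelsOverviewURL shown above; the Model value is passed in unchanged because its construction is outside this diff, and the AIStudio.Provider namespace for Model is an assumption here.

// Sketch only, not part of this commit.
using AIStudio.Provider;              // assumption: namespace of the Model type used above
using AIStudio.Provider.HuggingFace;

public static class HFRoutingSketch
{
    // Mirrors the base URL built in ProviderHuggingFace's constructor:
    // https://router.huggingface.co/{provider.Endpoints(model)}
    public static string RouterBaseUrl(HFInferenceProvider provider, Model model)
        => $"https://router.huggingface.co/{provider.Endpoints(model)}";

    // Mirrors the HUGGINGFACE case of LLMProvidersExtensions.GetModelsOverviewURL.
    public static string ModelsOverviewUrl(HFInferenceProvider provider)
        => $"https://huggingface.co/models?inference_provider={provider.EndpointsId()}";
}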