diff --git a/app/MindWork AI Studio.sln.DotSettings b/app/MindWork AI Studio.sln.DotSettings
index 44079898..6cbaf40c 100644
--- a/app/MindWork AI Studio.sln.DotSettings
+++ b/app/MindWork AI Studio.sln.DotSettings
@@ -3,14 +3,17 @@
EDI
ERI
GWDG
+ HF
LLM
LM
MSG
RAG
UI
+ URL
True
True
True
+ True
True
True
True
\ No newline at end of file
diff --git a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor
index 51db26b4..9ab1a81c 100644
--- a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor
+++ b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor
@@ -29,7 +29,7 @@
@context.Num
@context.InstanceName
- @context.UsedLLMProvider
+ @context.UsedLLMProvider.ToName()
@if (context.UsedLLMProvider is not LLMProviders.SELF_HOSTED)
{
diff --git a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor.cs b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor.cs
index 6ca2f6ca..5a71925b 100644
--- a/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor.cs
+++ b/app/MindWork AI Studio/Components/Settings/SettingsPanelProviders.razor.cs
@@ -65,6 +65,7 @@ public partial class SettingsPanelProviders : SettingsPanelBase
{ x => x.IsSelfHosted, provider.IsSelfHosted },
{ x => x.IsEditing, true },
{ x => x.DataHost, provider.Host },
+ { x => x.HfInstanceProviderId, provider.HFInstanceProvider },
};
var dialogReference = await this.DialogService.ShowAsync("Edit LLM Provider", dialogParameters, DialogOptions.FULLSCREEN);
diff --git a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor
index a7d5706e..5998f12f 100644
--- a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor
+++ b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor
@@ -1,4 +1,5 @@
@using AIStudio.Provider
+@using AIStudio.Provider.HuggingFace
@using AIStudio.Provider.SelfHosted
@@ -28,38 +29,55 @@
InputType="InputType.Password"
Validation="@this.providerValidation.ValidatingAPIKey"
/>
-
-
-
- @foreach (Host host in Enum.GetValues(typeof(Host)))
- {
- @host.Name()
- }
-
+ @if (this.DataLLMProvider.IsHostnameNeeded())
+ {
+
+ }
+
+ @if (this.DataLLMProvider.IsHostNeeded())
+ {
+
+ @foreach (Host host in Enum.GetValues(typeof(Host)))
+ {
+ @host.Name()
+ }
+
+ }
+
+ @if (this.DataLLMProvider.IsHFInstanceProviderNeeded())
+ {
+
+ @foreach (HFInstanceProvider instanceProvider in Enum.GetValues(typeof(HFInstanceProvider)))
+ {
+ @instanceProvider.ToName()
+ }
+
+
+ Please double-check that your model name matches the curl specification provided by the instance provider. Otherwise, you might get a Not Found error when using the model. Here's a curl example.
+ }
@if (this.DataLLMProvider.IsLLMModelProvidedManually())
{
- Show available models
+ Show available models
diff --git a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor.cs b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor.cs
--- a/app/MindWork AI Studio/Dialogs/ProviderDialog.razor.cs
+++ b/app/MindWork AI Studio/Dialogs/ProviderDialog.razor.cs
+ /// <summary>
+ /// The HFInstanceProvider to use, e.g., CEREBRAS.
+ /// </summary>
+ [Parameter]
+ public HFInstanceProvider HfInstanceProviderId { get; set; } = HFInstanceProvider.NONE;
+
/// <summary>
/// Is this provider self-hosted?
/// </summary>
@@ -122,10 +129,16 @@ public partial class ProviderDialog : ComponentBase, ISecretId
Id = this.DataId,
InstanceName = this.DataInstanceName,
UsedLLMProvider = this.DataLLMProvider,
- Model = this.DataLLMProvider is LLMProviders.FIREWORKS ? new Model(this.dataManuallyModel, null) : this.DataModel,
+ Model = this.DataLLMProvider switch
+ {
+ LLMProviders.FIREWORKS => new Model(this.dataManuallyModel, null),
+ LLMProviders.HUGGINGFACE => new Model(this.dataManuallyModel, null),
+ _ => this.DataModel
+ },
IsSelfHosted = this.DataLLMProvider is LLMProviders.SELF_HOSTED,
Hostname = cleanedHostname.EndsWith('/') ? cleanedHostname[..^1] : cleanedHostname,
Host = this.DataHost,
+ HFInstanceProvider = this.HfInstanceProviderId,
};
}
@@ -146,8 +159,8 @@ public partial class ProviderDialog : ComponentBase, ISecretId
{
this.dataEditingPreviousInstanceName = this.DataInstanceName.ToLowerInvariant();
- // When using Fireworks, we must copy the model name:
- if (this.DataLLMProvider is LLMProviders.FIREWORKS)
+ // When using Fireworks or Hugging Face, we must copy the model name:
+ if (this.DataLLMProvider is LLMProviders.FIREWORKS or LLMProviders.HUGGINGFACE)
this.dataManuallyModel = this.DataModel.Id;
//
@@ -230,7 +243,7 @@ public partial class ProviderDialog : ComponentBase, ISecretId
private string? ValidateManuallyModel(string manuallyModel)
{
- if (this.DataLLMProvider is LLMProviders.FIREWORKS && string.IsNullOrWhiteSpace(manuallyModel))
+ if ((this.DataLLMProvider is LLMProviders.FIREWORKS or LLMProviders.HUGGINGFACE) && string.IsNullOrWhiteSpace(manuallyModel))
return "Please enter a model name.";
return null;
diff --git a/app/MindWork AI Studio/Provider/Confidence.cs b/app/MindWork AI Studio/Provider/Confidence.cs
index 087ac4e0..a49c2781 100644
--- a/app/MindWork AI Studio/Provider/Confidence.cs
+++ b/app/MindWork AI Studio/Provider/Confidence.cs
@@ -35,10 +35,10 @@ public sealed record Confidence
""",
};
- public static readonly Confidence USA_NOT_TRUSTED = new()
+ public static readonly Confidence USA_HUB = new()
{
- Level = ConfidenceLevel.UNTRUSTED,
- Description = "The provider operates its service from the USA and is subject to **U.S. jurisdiction**. In case of suspicion, authorities in the USA can access your data. The provider's terms of service state that **all your data can be used by the provider at will.**",
+ Level = ConfidenceLevel.UNKNOWN,
+ Description = "The provider operates its service from the USA and is subject to **U.S. jurisdiction**. In case of suspicion, authorities in the USA can access your data. Please inform yourself about the use of your data. We do not know if your data is safe.",
};
public static readonly Confidence UNKNOWN = new()
diff --git a/app/MindWork AI Studio/Provider/HuggingFace/HFInstanceProvider.cs b/app/MindWork AI Studio/Provider/HuggingFace/HFInstanceProvider.cs
new file mode 100644
index 00000000..63221290
--- /dev/null
+++ b/app/MindWork AI Studio/Provider/HuggingFace/HFInstanceProvider.cs
@@ -0,0 +1,18 @@
+namespace AIStudio.Provider.HuggingFace;
+
+/// <summary>
+/// Enum for instance providers that Hugging Face supports.
+/// </summary>
+public enum HFInstanceProvider
+{
+ NONE,
+
+ CEREBRAS,
+ NEBIUS_AI_STUDIO,
+ SAMBANOVA,
+ NOVITA,
+ HYPERBOLIC,
+ TOGETHER_AI,
+ FIREWORKS,
+ HF_INFERENCE_API,
+}
\ No newline at end of file
diff --git a/app/MindWork AI Studio/Provider/HuggingFace/HFInstanceProviderExtensions.cs b/app/MindWork AI Studio/Provider/HuggingFace/HFInstanceProviderExtensions.cs
new file mode 100644
index 00000000..b0d81fba
--- /dev/null
+++ b/app/MindWork AI Studio/Provider/HuggingFace/HFInstanceProviderExtensions.cs
@@ -0,0 +1,43 @@
+namespace AIStudio.Provider.HuggingFace;
+
+public static class HFInstanceProviderExtensions
+{
+ public static string Endpoints(this HFInstanceProvider provider, Model model) => provider switch
+ {
+ HFInstanceProvider.CEREBRAS => "cerebras/v1/",
+ HFInstanceProvider.NEBIUS_AI_STUDIO => "nebius/v1/",
+ HFInstanceProvider.SAMBANOVA => "sambanova/v1/",
+ HFInstanceProvider.NOVITA => "novita/v3/openai/",
+ HFInstanceProvider.HYPERBOLIC => "hyperbolic/v1/",
+ HFInstanceProvider.TOGETHER_AI => "together/v1/",
+ HFInstanceProvider.FIREWORKS => "fireworks-ai/inference/v1/",
+ HFInstanceProvider.HF_INFERENCE_API => $"hf-inference/models/{model}/v1/",
+ _ => string.Empty,
+ };
+
+ public static string EndpointsId(this HFInstanceProvider provider) => provider switch
+ {
+ HFInstanceProvider.CEREBRAS => "cerebras",
+ HFInstanceProvider.NEBIUS_AI_STUDIO => "nebius",
+ HFInstanceProvider.SAMBANOVA => "sambanova",
+ HFInstanceProvider.NOVITA => "novita",
+ HFInstanceProvider.HYPERBOLIC => "hyperbolic",
+ HFInstanceProvider.TOGETHER_AI => "together",
+ HFInstanceProvider.FIREWORKS => "fireworks",
+ HFInstanceProvider.HF_INFERENCE_API => "hf-inference",
+ _ => string.Empty,
+ };
+
+ public static string ToName(this HFInstanceProvider provider) => provider switch
+ {
+ HFInstanceProvider.CEREBRAS => "Cerebras",
+ HFInstanceProvider.NEBIUS_AI_STUDIO => "Nebius AI Studio",
+ HFInstanceProvider.SAMBANOVA => "Sambanova",
+ HFInstanceProvider.NOVITA => "Novita",
+ HFInstanceProvider.HYPERBOLIC => "Hyperbolic",
+ HFInstanceProvider.TOGETHER_AI => "Together AI",
+ HFInstanceProvider.FIREWORKS => "Fireworks AI",
+ HFInstanceProvider.HF_INFERENCE_API => "Hugging Face Inference API",
+ _ => string.Empty,
+ };
+}
\ No newline at end of file
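A quick sketch of how these path fragments compose with the Hugging Face router host used by `ProviderHuggingFace` below. This is illustrative only: the model id is a placeholder, and the comment on `HF_INFERENCE_API` assumes that `Model`'s string representation is the plain model id.

```csharp
using AIStudio.Provider;
using AIStudio.Provider.HuggingFace;

// Placeholder model id for illustration only.
var model = new Model("meta-llama/Llama-3.3-70B-Instruct", null);

// ProviderHuggingFace prefixes the fragment with the router host:
var novitaBase = $"https://router.huggingface.co/{HFInstanceProvider.NOVITA.Endpoints(model)}";
Console.WriteLine(novitaBase);
// -> https://router.huggingface.co/novita/v3/openai/

// HF_INFERENCE_API is the only fragment that embeds the model id into the path
// (assuming Model's string representation is the plain id):
var hfInferenceBase = $"https://router.huggingface.co/{HFInstanceProvider.HF_INFERENCE_API.Endpoints(model)}";
Console.WriteLine(hfInferenceBase);

// EndpointsId() is what the models-overview URL in LLMProvidersExtensions appends
// as the inference_provider query parameter:
Console.WriteLine(HFInstanceProvider.NOVITA.EndpointsId()); // novita
```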
diff --git a/app/MindWork AI Studio/Provider/HuggingFace/ProviderHuggingFace.cs b/app/MindWork AI Studio/Provider/HuggingFace/ProviderHuggingFace.cs
new file mode 100644
index 00000000..25f2baae
--- /dev/null
+++ b/app/MindWork AI Studio/Provider/HuggingFace/ProviderHuggingFace.cs
@@ -0,0 +1,115 @@
+using System.Net.Http.Headers;
+using System.Runtime.CompilerServices;
+using System.Text;
+using System.Text.Json;
+
+using AIStudio.Chat;
+using AIStudio.Provider.OpenAI;
+using AIStudio.Settings;
+
+namespace AIStudio.Provider.HuggingFace;
+
+public sealed class ProviderHuggingFace : BaseProvider
+{
+ public ProviderHuggingFace(ILogger logger, HFInstanceProvider hfProvider, Model model) : base($"https://router.huggingface.co/{hfProvider.Endpoints(model)}", logger)
+ {
+ logger.LogInformation($"We use the instance provider '{hfProvider}'. Thus we use the base URL 'https://router.huggingface.co/{hfProvider.Endpoints(model)}'.");
+ }
+
+ #region Implementation of IProvider
+
+ /// <inheritdoc/>
+ public override string Id => LLMProviders.HUGGINGFACE.ToName();
+
+ /// <inheritdoc/>
+ public override string InstanceName { get; set; } = "HuggingFace";
+
+ /// <inheritdoc/>
+ public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+ {
+ // Get the API key:
+ var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+ if(!requestedSecret.Success)
+ yield break;
+
+ // Prepare the system prompt:
+ var systemPrompt = new Message
+ {
+ Role = "system",
+ Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
+ };
+
+ // Prepare the HuggingFace HTTP chat request:
+ var huggingfaceChatRequest = JsonSerializer.Serialize(new ChatRequest
+ {
+ Model = chatModel.Id,
+
+ // Build the messages:
+ // - First of all the system prompt
+ // - Then non-empty user and AI messages
+ Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
+ {
+ Role = n.Role switch
+ {
+ ChatRole.USER => "user",
+ ChatRole.AI => "assistant",
+ ChatRole.AGENT => "assistant",
+ ChatRole.SYSTEM => "system",
+
+ _ => "user",
+ },
+
+ Content = n.Content switch
+ {
+ ContentText text => text.Text,
+ _ => string.Empty,
+ }
+ }).ToList()],
+ Stream = true,
+ }, JSON_SERIALIZER_OPTIONS);
+
+ async Task<HttpRequestMessage> RequestBuilder()
+ {
+ // Build the HTTP post request:
+ var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");
+
+ // Set the authorization header:
+ request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));
+
+ // Set the content:
+ request.Content = new StringContent(huggingfaceChatRequest, Encoding.UTF8, "application/json");
+ return request;
+ }
+
+ await foreach (var content in this.StreamChatCompletionInternal("HuggingFace", RequestBuilder, token))
+ yield return content;
+ }
+
+ #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
+ /// <inheritdoc/>
+ public override async IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default)
+ {
+ yield break;
+ }
+ #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
+
+ /// <inheritdoc/>
+ public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
+ {
+ return Task.FromResult(Enumerable.Empty<Model>());
+ }
+
+ /// <inheritdoc/>
+ public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
+ {
+ return Task.FromResult(Enumerable.Empty<Model>());
+ }
+
+ /// <inheritdoc/>
+ public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
+ {
+ return Task.FromResult(Enumerable.Empty<Model>());
+ }
+
+ #endregion
+}
\ No newline at end of file
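The dialog hint above promises a curl example; as a stand-in, here is a minimal, self-contained C# sketch of the raw request that `StreamChatCompletion` effectively sends. The token and model id are placeholders, the Novita route is just one example, and the hand-written JSON body assumes the usual OpenAI-compatible field names (`model`, `messages`, `stream`).

```csharp
using System.Net.Http.Headers;
using System.Text;

// Placeholder values — use your own token and a model id that the chosen
// instance provider actually serves.
const string hfToken = "<your Hugging Face token>";
const string modelId = "meta-llama/Llama-3.3-70B-Instruct";

using var http = new HttpClient { BaseAddress = new Uri("https://router.huggingface.co/novita/v3/openai/") };

// Same shape as the request built in ProviderHuggingFace: POST to the relative
// "chat/completions" path with a Bearer token and a JSON chat request.
var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", hfToken);
request.Content = new StringContent(
    $$"""{"model":"{{modelId}}","messages":[{"role":"user","content":"Hello!"}],"stream":true}""",
    Encoding.UTF8, "application/json");

using var response = await http.SendAsync(request);

// A model id the instance provider does not serve typically yields 404 Not Found —
// the exact caveat shown in ProviderDialog.razor.
Console.WriteLine((int)response.StatusCode);
```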
diff --git a/app/MindWork AI Studio/Provider/LLMProviders.cs b/app/MindWork AI Studio/Provider/LLMProviders.cs
index 1c65835f..118d68aa 100644
--- a/app/MindWork AI Studio/Provider/LLMProviders.cs
+++ b/app/MindWork AI Studio/Provider/LLMProviders.cs
@@ -17,6 +17,7 @@ public enum LLMProviders
FIREWORKS = 5,
GROQ = 6,
+ HUGGINGFACE = 13,
SELF_HOSTED = 4,
diff --git a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
index 72f06777..8abb0bd4 100644
--- a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
+++ b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
@@ -6,6 +6,7 @@ using AIStudio.Provider.Google;
using AIStudio.Provider.Groq;
using AIStudio.Provider.GWDG;
using AIStudio.Provider.Helmholtz;
+using AIStudio.Provider.HuggingFace;
using AIStudio.Provider.Mistral;
using AIStudio.Provider.OpenAI;
using AIStudio.Provider.SelfHosted;
@@ -37,6 +38,7 @@ public static class LLMProvidersExtensions
LLMProviders.GROQ => "Groq",
LLMProviders.FIREWORKS => "Fireworks.ai",
+ LLMProviders.HUGGINGFACE => "Hugging Face",
LLMProviders.SELF_HOSTED => "Self-hosted",
@@ -56,7 +58,10 @@ public static class LLMProvidersExtensions
{
LLMProviders.NONE => Confidence.NONE,
- LLMProviders.FIREWORKS => Confidence.USA_NOT_TRUSTED.WithRegion("America, U.S.").WithSources("https://fireworks.ai/terms-of-service").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
+ LLMProviders.FIREWORKS => Confidence.USA_HUB.WithRegion("America, U.S.").WithSources("https://fireworks.ai/terms-of-service").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
+
+ // Not trusted, because Hugging Face only routes requests to third-party providers and we cannot ensure they do not use your data.
+ LLMProviders.HUGGINGFACE => Confidence.USA_HUB.WithRegion("America, U.S.").WithSources("https://huggingface.co/terms-of-service").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
LLMProviders.OPEN_AI => Confidence.USA_NO_TRAINING.WithRegion("America, U.S.").WithSources(
"https://platform.openai.com/docs/models/default-usage-policies-by-endpoint",
@@ -112,6 +117,7 @@ public static class LLMProvidersExtensions
LLMProviders.X => false,
LLMProviders.GWDG => false,
LLMProviders.DEEP_SEEK => false,
+ LLMProviders.HUGGINGFACE => false,
//
// Self-hosted providers are treated as a special case anyway.
@@ -129,7 +135,7 @@ public static class LLMProvidersExtensions
/// The provider instance.
public static IProvider CreateProvider(this AIStudio.Settings.Provider providerSettings, ILogger logger)
{
- return providerSettings.UsedLLMProvider.CreateProvider(providerSettings.InstanceName, providerSettings.Host, providerSettings.Hostname, logger);
+ return providerSettings.UsedLLMProvider.CreateProvider(providerSettings.InstanceName, providerSettings.Host, providerSettings.Hostname, providerSettings.Model, providerSettings.HFInstanceProvider, logger);
}
///
@@ -140,10 +146,10 @@ public static class LLMProvidersExtensions
/// The provider instance.
public static IProvider CreateProvider(this EmbeddingProvider embeddingProviderSettings, ILogger logger)
{
- return embeddingProviderSettings.UsedLLMProvider.CreateProvider(embeddingProviderSettings.Name, embeddingProviderSettings.Host, embeddingProviderSettings.Hostname, logger);
+ return embeddingProviderSettings.UsedLLMProvider.CreateProvider(embeddingProviderSettings.Name, embeddingProviderSettings.Host, embeddingProviderSettings.Hostname, embeddingProviderSettings.Model, HFInstanceProvider.NONE, logger);
}
- private static IProvider CreateProvider(this LLMProviders provider, string instanceName, Host host, string hostname, ILogger logger)
+ private static IProvider CreateProvider(this LLMProviders provider, string instanceName, Host host, string hostname, Model model, HFInstanceProvider instanceProvider, ILogger logger)
{
try
{
@@ -159,6 +165,7 @@ public static class LLMProvidersExtensions
LLMProviders.GROQ => new ProviderGroq(logger) { InstanceName = instanceName },
LLMProviders.FIREWORKS => new ProviderFireworks(logger) { InstanceName = instanceName },
+ LLMProviders.HUGGINGFACE => new ProviderHuggingFace(logger, instanceProvider, model) { InstanceName = instanceName },
LLMProviders.SELF_HOSTED => new ProviderSelfHosted(logger, host, hostname) { InstanceName = instanceName },
@@ -187,6 +194,7 @@ public static class LLMProvidersExtensions
LLMProviders.GROQ => "https://console.groq.com/",
LLMProviders.FIREWORKS => "https://fireworks.ai/login",
+ LLMProviders.HUGGINGFACE => "https://huggingface.co/login",
LLMProviders.HELMHOLTZ => "https://sdlaml.pages.jsc.fz-juelich.de/ai/guides/blablador_api_access/#step-1-register-on-gitlab",
LLMProviders.GWDG => "https://docs.hpc.gwdg.de/services/saia/index.html#api-request",
@@ -205,6 +213,7 @@ public static class LLMProvidersExtensions
LLMProviders.FIREWORKS => "https://fireworks.ai/account/billing",
LLMProviders.DEEP_SEEK => "https://platform.deepseek.com/usage",
LLMProviders.ALIBABA_CLOUD => "https://usercenter2-intl.aliyun.com/billing",
+ LLMProviders.HUGGINGFACE => "https://huggingface.co/settings/billing",
_ => string.Empty,
};
@@ -220,19 +229,22 @@ public static class LLMProvidersExtensions
LLMProviders.GOOGLE => true,
LLMProviders.DEEP_SEEK => true,
LLMProviders.ALIBABA_CLOUD => true,
+ LLMProviders.HUGGINGFACE => true,
_ => false,
};
- public static string GetModelsOverviewURL(this LLMProviders provider) => provider switch
+ public static string GetModelsOverviewURL(this LLMProviders provider, HFInstanceProvider instanceProvider) => provider switch
{
LLMProviders.FIREWORKS => "https://fireworks.ai/models?show=Serverless",
+ LLMProviders.HUGGINGFACE => $"https://huggingface.co/models?inference_provider={instanceProvider.EndpointsId()}",
_ => string.Empty,
};
public static bool IsLLMModelProvidedManually(this LLMProviders provider) => provider switch
{
LLMProviders.FIREWORKS => true,
+ LLMProviders.HUGGINGFACE => true,
_ => false,
};
@@ -268,6 +280,7 @@ public static class LLMProvidersExtensions
LLMProviders.FIREWORKS => true,
LLMProviders.HELMHOLTZ => true,
LLMProviders.GWDG => true,
+ LLMProviders.HUGGINGFACE => true,
LLMProviders.SELF_HOSTED => host is Host.OLLAMA,
@@ -288,6 +301,7 @@ public static class LLMProvidersExtensions
LLMProviders.FIREWORKS => true,
LLMProviders.HELMHOLTZ => true,
LLMProviders.GWDG => true,
+ LLMProviders.HUGGINGFACE => true,
_ => false,
};
@@ -317,4 +331,10 @@ public static class LLMProvidersExtensions
return true;
}
+
+ public static bool IsHFInstanceProviderNeeded(this LLMProviders provider) => provider switch
+ {
+ LLMProviders.HUGGINGFACE => true,
+ _ => false,
+ };
}
\ No newline at end of file
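A small usage sketch of the two extended entry points above: the provider wiring and the new models-overview overload. All values are illustrative, and `NullLogger` stands in for the application's real logger.

```csharp
using AIStudio.Provider;
using AIStudio.Provider.HuggingFace;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;

// Illustrative values only — instance name and model id are placeholders.
var providerSettings = new AIStudio.Settings.Provider
{
    InstanceName = "HF via Novita",
    UsedLLMProvider = LLMProviders.HUGGINGFACE,
    Model = new Model("meta-llama/Llama-3.3-70B-Instruct", null),
    HFInstanceProvider = HFInstanceProvider.NOVITA,
};

ILogger logger = NullLogger.Instance;

// The new model and HFInstanceProvider arguments flow from the settings record
// into ProviderHuggingFace via CreateProvider:
IProvider provider = providerSettings.CreateProvider(logger);
Console.WriteLine(provider.InstanceName); // HF via Novita

// The models-overview link now depends on the chosen instance provider:
Console.WriteLine(LLMProviders.HUGGINGFACE.GetModelsOverviewURL(HFInstanceProvider.NOVITA));
// -> https://huggingface.co/models?inference_provider=novita
```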
diff --git a/app/MindWork AI Studio/Settings/Provider.cs b/app/MindWork AI Studio/Settings/Provider.cs
index b349016d..6aefc5b5 100644
--- a/app/MindWork AI Studio/Settings/Provider.cs
+++ b/app/MindWork AI Studio/Settings/Provider.cs
@@ -1,7 +1,7 @@
using System.Text.Json.Serialization;
using AIStudio.Provider;
-
+using AIStudio.Provider.HuggingFace;
using Host = AIStudio.Provider.SelfHosted.Host;
namespace AIStudio.Settings;
@@ -24,7 +24,8 @@ public readonly record struct Provider(
Model Model,
bool IsSelfHosted = false,
string Hostname = "http://localhost:1234",
- Host Host = Host.NONE) : ISecretId
+ Host Host = Host.NONE,
+ HFInstanceProvider HFInstanceProvider = HFInstanceProvider.NONE) : ISecretId
{
#region Overrides of ValueType
diff --git a/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs b/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs
index be7b16be..12d27b43 100644
--- a/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs
+++ b/app/MindWork AI Studio/Tools/Validation/ProviderValidation.cs
@@ -1,5 +1,5 @@
using AIStudio.Provider;
-
+using AIStudio.Provider.HuggingFace;
using Host = AIStudio.Provider.SelfHosted.Host;
namespace AIStudio.Tools.Validation;
@@ -93,4 +93,15 @@ public sealed class ProviderValidation
return null;
}
+
+ public string? ValidatingHFInstanceProvider(HFInstanceProvider instanceProvider)
+ {
+ if(this.GetProvider() is not LLMProviders.HUGGINGFACE)
+ return null;
+
+ if (instanceProvider is HFInstanceProvider.NONE)
+ return "Please select an Hugging Face instance provider.";
+
+ return null;
+ }
}
\ No newline at end of file