mirror of
https://github.com/MindWorkAI/AI-Studio.git
synced 2025-04-28 08:19:47 +00:00
Works at the moment, but only for a hardcoded instance provider.
This commit is contained in:
parent
693fe5e161
commit
727da036a5
@ -0,0 +1,110 @@
|
||||
using System.Net.Http.Headers;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
using AIStudio.Chat;
|
||||
using AIStudio.Provider.OpenAI;
|
||||
using AIStudio.Settings;
|
||||
|
||||
namespace AIStudio.Provider.HuggingFace;
|
||||
|
||||
/// <summary>
/// Provider for LLMs served through the Hugging Face inference router
/// (OpenAI-compatible chat completions API).
/// </summary>
/// <remarks>
/// The router proxies requests to a third-party inference provider. The provider
/// segment of the base URL is configurable via <paramref name="inferenceProvider"/>
/// (default "cerebras", the previously hardcoded value), so other Hugging Face
/// inference providers can be targeted without changing this class.
/// </remarks>
/// <param name="logger">Logger used by the base provider for diagnostics.</param>
/// <param name="inferenceProvider">The Hugging Face inference provider routed to, e.g. "cerebras".</param>
public sealed class ProviderHuggingFace(ILogger logger, string inferenceProvider = "cerebras") : BaseProvider($"https://router.huggingface.co/{inferenceProvider}/v1/", logger)
{
    #region Implementation of IProvider
    
    /// <inheritdoc />
    public override string Id => LLMProviders.HUGGINGFACE.ToName();
    
    /// <inheritdoc />
    public override string InstanceName { get; set; } = "HuggingFace";
    
    /// <inheritdoc />
    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {
        // Get the API key; without one we cannot authenticate, so end the stream right away:
        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
        if(!requestedSecret.Success)
            yield break;
        
        // Prepare the system prompt:
        var systemPrompt = new Message
        {
            Role = "system",
            Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
        };
        
        // Prepare the HuggingFace HTTP chat request (serialized once; reused by every retry of RequestBuilder):
        var huggingfaceChatRequest = JsonSerializer.Serialize(new ChatRequest
        {
            Model = chatModel.Id,
            
            // Build the messages:
            // - First of all the system prompt
            // - Then all non-empty user and AI text messages
            Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
            {
                // Map the internal chat roles onto the OpenAI-compatible role strings:
                Role = n.Role switch
                {
                    ChatRole.USER => "user",
                    ChatRole.AI => "assistant",
                    ChatRole.AGENT => "assistant",
                    ChatRole.SYSTEM => "system",
                    
                    _ => "user",
                },
                
                // Only text content is forwarded; anything else becomes an empty string
                // (such blocks are already filtered out by the Where clause above):
                Content = n.Content switch
                {
                    ContentText text => text.Text,
                    _ => string.Empty,
                }
            }).ToList()],
            Stream = true,
        }, JSON_SERIALIZER_OPTIONS);

        // Builds a fresh HttpRequestMessage per attempt (a request message cannot be reused after sending):
        async Task<HttpRequestMessage> RequestBuilder()
        {
            // Build the HTTP post request:
            var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");

            // Set the authorization header; the secret is decrypted on demand rather than stored in plain text:
            request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));

            // Set the content:
            request.Content = new StringContent(huggingfaceChatRequest, Encoding.UTF8, "application/json");
            return request;
        }
        
        // Stream and yield the response chunks; the shared base implementation handles SSE parsing:
        await foreach (var content in this.StreamChatCompletionInternal<ResponseStreamLine>("HuggingFace", RequestBuilder, token))
            yield return content;
    }

    #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
    /// <inheritdoc />
    /// <remarks>
    /// Image generation is not supported for this provider; the stream ends immediately.
    /// NOTE(review): the promptNegative default of FilterOperator.String.Empty is kept to match
    /// the overridden signature — it looks like a plain string.Empty was intended; confirm against IProvider.
    /// </remarks>
    public override async IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default)
    {
        yield break;
    }
    #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
    
    /// <inheritdoc />
    /// <remarks>
    /// No model-listing API is queried; models are entered manually for this provider.
    /// </remarks>
    public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        return Task.FromResult(Enumerable.Empty<Model>());
    }
    
    /// <inheritdoc />
    /// <remarks>No image models are available; always returns an empty sequence.</remarks>
    public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        return Task.FromResult(Enumerable.Empty<Model>());
    }
    
    /// <inheritdoc />
    /// <remarks>No embedding models are available; always returns an empty sequence.</remarks>
    public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        return Task.FromResult(Enumerable.Empty<Model>());
    }
    
    #endregion
}
|
@ -17,6 +17,7 @@ public enum LLMProviders
|
||||
|
||||
FIREWORKS = 5,
|
||||
GROQ = 6,
|
||||
HUGGINGFACE = 13,
|
||||
|
||||
SELF_HOSTED = 4,
|
||||
|
||||
|
@ -6,6 +6,7 @@ using AIStudio.Provider.Google;
|
||||
using AIStudio.Provider.Groq;
|
||||
using AIStudio.Provider.GWDG;
|
||||
using AIStudio.Provider.Helmholtz;
|
||||
using AIStudio.Provider.HuggingFace;
|
||||
using AIStudio.Provider.Mistral;
|
||||
using AIStudio.Provider.OpenAI;
|
||||
using AIStudio.Provider.SelfHosted;
|
||||
@ -37,6 +38,7 @@ public static class LLMProvidersExtensions
|
||||
|
||||
LLMProviders.GROQ => "Groq",
|
||||
LLMProviders.FIREWORKS => "Fireworks.ai",
|
||||
LLMProviders.HUGGINGFACE => "Hugging Face",
|
||||
|
||||
LLMProviders.SELF_HOSTED => "Self-hosted",
|
||||
|
||||
@ -58,6 +60,9 @@ public static class LLMProvidersExtensions
|
||||
|
||||
LLMProviders.FIREWORKS => Confidence.USA_NOT_TRUSTED.WithRegion("America, U.S.").WithSources("https://fireworks.ai/terms-of-service").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
|
||||
|
||||
// Not trusted, because huggingface only routes you to a third-party-provider and we can't make sure they do not use your data
|
||||
LLMProviders.HUGGINGFACE => Confidence.USA_NOT_TRUSTED.WithRegion("America, U.S.").WithSources("https://huggingface.co/terms-of-service").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
|
||||
|
||||
LLMProviders.OPEN_AI => Confidence.USA_NO_TRAINING.WithRegion("America, U.S.").WithSources(
|
||||
"https://platform.openai.com/docs/models/default-usage-policies-by-endpoint",
|
||||
"https://openai.com/policies/terms-of-use/",
|
||||
@ -112,6 +117,7 @@ public static class LLMProvidersExtensions
|
||||
LLMProviders.X => false,
|
||||
LLMProviders.GWDG => false,
|
||||
LLMProviders.DEEP_SEEK => false,
|
||||
LLMProviders.HUGGINGFACE => false,
|
||||
|
||||
//
|
||||
// Self-hosted providers are treated as a special case anyway.
|
||||
@ -159,6 +165,7 @@ public static class LLMProvidersExtensions
|
||||
|
||||
LLMProviders.GROQ => new ProviderGroq(logger) { InstanceName = instanceName },
|
||||
LLMProviders.FIREWORKS => new ProviderFireworks(logger) { InstanceName = instanceName },
|
||||
LLMProviders.HUGGINGFACE => new ProviderHuggingFace(logger) { InstanceName = instanceName },
|
||||
|
||||
LLMProviders.SELF_HOSTED => new ProviderSelfHosted(logger, host, hostname) { InstanceName = instanceName },
|
||||
|
||||
@ -187,6 +194,7 @@ public static class LLMProvidersExtensions
|
||||
|
||||
LLMProviders.GROQ => "https://console.groq.com/",
|
||||
LLMProviders.FIREWORKS => "https://fireworks.ai/login",
|
||||
LLMProviders.HUGGINGFACE => "https://huggingface.co/login",
|
||||
|
||||
LLMProviders.HELMHOLTZ => "https://sdlaml.pages.jsc.fz-juelich.de/ai/guides/blablador_api_access/#step-1-register-on-gitlab",
|
||||
LLMProviders.GWDG => "https://docs.hpc.gwdg.de/services/saia/index.html#api-request",
|
||||
@ -205,6 +213,7 @@ public static class LLMProvidersExtensions
|
||||
LLMProviders.FIREWORKS => "https://fireworks.ai/account/billing",
|
||||
LLMProviders.DEEP_SEEK => "https://platform.deepseek.com/usage",
|
||||
LLMProviders.ALIBABA_CLOUD => "https://usercenter2-intl.aliyun.com/billing",
|
||||
LLMProviders.HUGGINGFACE => "https://huggingface.co/settings/billing",
|
||||
|
||||
_ => string.Empty,
|
||||
};
|
||||
@ -220,6 +229,7 @@ public static class LLMProvidersExtensions
|
||||
LLMProviders.GOOGLE => true,
|
||||
LLMProviders.DEEP_SEEK => true,
|
||||
LLMProviders.ALIBABA_CLOUD => true,
|
||||
LLMProviders.HUGGINGFACE => true,
|
||||
|
||||
_ => false,
|
||||
};
|
||||
@ -227,12 +237,14 @@ public static class LLMProvidersExtensions
|
||||
/// <summary>
/// Returns the URL of the provider's public model overview page,
/// or an empty string when no such page is known for the given provider.
/// </summary>
public static string GetModelsOverviewURL(this LLMProviders provider)
{
    switch (provider)
    {
        case LLMProviders.FIREWORKS:
            return "https://fireworks.ai/models?show=Serverless";

        case LLMProviders.HUGGINGFACE:
            return "https://huggingface.co/models?inference_provider=all";

        default:
            return string.Empty;
    }
}
|
||||
|
||||
/// <summary>
/// Indicates whether the user must enter the LLM model identifier manually
/// for the given provider. True for Fireworks and Hugging Face; false otherwise.
/// </summary>
public static bool IsLLMModelProvidedManually(this LLMProviders provider)
{
    switch (provider)
    {
        case LLMProviders.FIREWORKS:
        case LLMProviders.HUGGINGFACE:
            return true;

        default:
            return false;
    }
}
|
||||
|
||||
@ -268,6 +280,7 @@ public static class LLMProvidersExtensions
|
||||
LLMProviders.FIREWORKS => true,
|
||||
LLMProviders.HELMHOLTZ => true,
|
||||
LLMProviders.GWDG => true,
|
||||
LLMProviders.HUGGINGFACE => true,
|
||||
|
||||
LLMProviders.SELF_HOSTED => host is Host.OLLAMA,
|
||||
|
||||
@ -288,6 +301,7 @@ public static class LLMProvidersExtensions
|
||||
LLMProviders.FIREWORKS => true,
|
||||
LLMProviders.HELMHOLTZ => true,
|
||||
LLMProviders.GWDG => true,
|
||||
LLMProviders.HUGGINGFACE => true,
|
||||
|
||||
_ => false,
|
||||
};
|
||||
|
Loading…
Reference in New Issue
Block a user