diff --git a/app/MindWork AI Studio.sln.DotSettings b/app/MindWork AI Studio.sln.DotSettings index 431ff5c..4407989 100644 --- a/app/MindWork AI Studio.sln.DotSettings +++ b/app/MindWork AI Studio.sln.DotSettings @@ -2,6 +2,7 @@ AI EDI ERI + GWDG LLM LM MSG @@ -9,6 +10,7 @@ UI True True + True True True True \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/GWDG/ProviderGWDG.cs b/app/MindWork AI Studio/Provider/GWDG/ProviderGWDG.cs new file mode 100644 index 0000000..40054cf --- /dev/null +++ b/app/MindWork AI Studio/Provider/GWDG/ProviderGWDG.cs @@ -0,0 +1,139 @@ +using System.Net.Http.Headers; +using System.Runtime.CompilerServices; +using System.Text; +using System.Text.Json; + +using AIStudio.Chat; +using AIStudio.Provider.OpenAI; +using AIStudio.Settings; + +namespace AIStudio.Provider.GWDG; + +public sealed class ProviderGWDG(ILogger logger) : BaseProvider("https://chat-ai.academiccloud.de/v1/", logger) +{ + #region Implementation of IProvider + + /// <inheritdoc /> + public override string Id => LLMProviders.GWDG.ToName(); + + /// <inheritdoc /> + public override string InstanceName { get; set; } = "GWDG SAIA"; + + /// <inheritdoc /> + public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default) + { + // Get the API key: + var requestedSecret = await RUST_SERVICE.GetAPIKey(this); + if(!requestedSecret.Success) + yield break; + + // Prepare the system prompt: + var systemPrompt = new Message + { + Role = "system", + Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger), + }; + + // Prepare the GWDG HTTP chat request: + var gwdgChatRequest = JsonSerializer.Serialize(new ChatRequest + { + Model = chatModel.Id, + + // Build the messages: + // - First of all the system prompt + // - Then none-empty user and AI messages + Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && 
!string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message + { + Role = n.Role switch + { + ChatRole.USER => "user", + ChatRole.AI => "assistant", + ChatRole.AGENT => "assistant", + ChatRole.RAG => "assistant", + ChatRole.SYSTEM => "system", + + _ => "user", + }, + + Content = n.Content switch + { + ContentText text => text.Text, + _ => string.Empty, + } + }).ToList()], + Stream = true, + }, JSON_SERIALIZER_OPTIONS); + + async Task<HttpRequestMessage> RequestBuilder() + { + // Build the HTTP post request: + var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions"); + + // Set the authorization header: + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION)); + + // Set the content: + request.Content = new StringContent(gwdgChatRequest, Encoding.UTF8, "application/json"); + return request; + } + + await foreach (var content in this.StreamChatCompletionInternal("GWDG", RequestBuilder, token)) + yield return content; + } + + #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously + /// <inheritdoc /> + public override async IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default) + { + yield break; + } + #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously + + /// <inheritdoc /> + public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default) + { + var models = await this.LoadModels(token, apiKeyProvisional); + return models.Where(model => !model.Id.StartsWith("e5-mistral-7b-instruct", StringComparison.InvariantCultureIgnoreCase)); + } + + /// <inheritdoc /> + public override Task<IEnumerable<Model>> GetImageModels(string? 
apiKeyProvisional = null, CancellationToken token = default) + { + return Task.FromResult(Enumerable.Empty<Model>()); + } + + /// <inheritdoc /> + public override async Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default) + { + var models = await this.LoadModels(token, apiKeyProvisional); + return models.Where(model => model.Id.StartsWith("e5-", StringComparison.InvariantCultureIgnoreCase)); + } + + #endregion + + private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null) + { + var secretKey = apiKeyProvisional switch + { + not null => apiKeyProvisional, + _ => await RUST_SERVICE.GetAPIKey(this) switch + { + { Success: true } result => await result.Secret.Decrypt(ENCRYPTION), + _ => null, + } + }; + + if (secretKey is null) + return []; + + using var request = new HttpRequestMessage(HttpMethod.Get, "models"); + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey); + + using var response = await this.httpClient.SendAsync(request, token); + if(!response.IsSuccessStatusCode) + return []; + + var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token); + return modelResponse.Data; + } +} \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/LLMProviders.cs b/app/MindWork AI Studio/Provider/LLMProviders.cs index d35fb7b..6c56c6e 100644 --- a/app/MindWork AI Studio/Provider/LLMProviders.cs +++ b/app/MindWork AI Studio/Provider/LLMProviders.cs @@ -19,4 +19,5 @@ public enum LLMProviders SELF_HOSTED = 4, HELMHOLTZ = 9, + GWDG = 10, } \ No newline at end of file diff --git a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs index 554fc50..ce435e1 100644 --- a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs +++ b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs @@ -2,6 +2,7 @@ using AIStudio.Provider.Anthropic; using AIStudio.Provider.Fireworks; using AIStudio.Provider.Google; using 
AIStudio.Provider.Groq; +using AIStudio.Provider.GWDG; using AIStudio.Provider.Helmholtz; using AIStudio.Provider.Mistral; using AIStudio.Provider.OpenAI; @@ -36,6 +37,7 @@ public static class LLMProvidersExtensions LLMProviders.SELF_HOSTED => "Self-hosted", LLMProviders.HELMHOLTZ => "Helmholtz Blablador", + LLMProviders.GWDG => "GWDG SAIA", _ => "Unknown", }; @@ -72,6 +74,7 @@ public static class LLMProvidersExtensions LLMProviders.SELF_HOSTED => Confidence.SELF_HOSTED.WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)), LLMProviders.HELMHOLTZ => Confidence.GDPR_NO_TRAINING.WithRegion("Europe, Germany").WithSources("https://helmholtz.cloud/services/?serviceID=d7d5c597-a2f6-4bd1-b71e-4d6499d98570").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)), + LLMProviders.GWDG => Confidence.GDPR_NO_TRAINING.WithRegion("Europe, Germany").WithSources("https://docs.hpc.gwdg.de/services/chat-ai/data-privacy/index.html").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)), _ => Confidence.UNKNOWN.WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)), }; @@ -98,6 +101,7 @@ public static class LLMProvidersExtensions LLMProviders.ANTHROPIC => false, LLMProviders.FIREWORKS => false, LLMProviders.X => false, + LLMProviders.GWDG => false, // // Self-hosted providers are treated as a special case anyway. 
@@ -147,6 +151,7 @@ public static class LLMProvidersExtensions LLMProviders.SELF_HOSTED => new ProviderSelfHosted(logger, host, hostname) { InstanceName = instanceName }, LLMProviders.HELMHOLTZ => new ProviderHelmholtz(logger) { InstanceName = instanceName }, + LLMProviders.GWDG => new ProviderGWDG(logger) { InstanceName = instanceName }, _ => new NoProvider(), }; @@ -170,6 +175,7 @@ public static class LLMProvidersExtensions LLMProviders.FIREWORKS => "https://fireworks.ai/login", LLMProviders.HELMHOLTZ => "https://sdlaml.pages.jsc.fz-juelich.de/ai/guides/blablador_api_access/#step-1-register-on-gitlab", + LLMProviders.GWDG => "https://docs.hpc.gwdg.de/services/saia/index.html#api-request", _ => string.Empty, }; @@ -241,6 +247,7 @@ public static class LLMProvidersExtensions LLMProviders.GROQ => true, LLMProviders.FIREWORKS => true, LLMProviders.HELMHOLTZ => true, + LLMProviders.GWDG => true, LLMProviders.SELF_HOSTED => host is Host.OLLAMA, @@ -258,6 +265,7 @@ public static class LLMProvidersExtensions LLMProviders.GROQ => true, LLMProviders.FIREWORKS => true, LLMProviders.HELMHOLTZ => true, + LLMProviders.GWDG => true, _ => false, }; diff --git a/app/MindWork AI Studio/wwwroot/changelog/v0.9.31.md b/app/MindWork AI Studio/wwwroot/changelog/v0.9.31.md index bac28d4..3145150 100644 --- a/app/MindWork AI Studio/wwwroot/changelog/v0.9.31.md +++ b/app/MindWork AI Studio/wwwroot/changelog/v0.9.31.md @@ -1,2 +1,3 @@ # v0.9.31, build 206 (2025-02-xx xx:xx UTC) -- Added Helmholtz (aka "Blablador") as provider. This provider is available to all researchers and employees of the 18 Helmholtz Centers as well as all eduGAIN organizations worldwide. \ No newline at end of file +- Added Helmholtz (aka "Blablador") as provider. This provider is available to all researchers and employees of the 18 Helmholtz Centers as well as all eduGAIN organizations worldwide. +- Added GWDG SAIA as provider. 
This provider is available to all researchers and employees of the GWDG, the Max Planck Society, the 18 Helmholtz Centers, and most German universities. \ No newline at end of file