From e5d46b7481e1b23eccd836cf548108cab99aed69 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Peer=20Sch=C3=BCtt?=
<20603780+peerschuett@users.noreply.github.com>
Date: Fri, 29 Aug 2025 16:09:38 +0200
Subject: [PATCH] Started adding Perplexity as provider
---
.../Provider/LLMProviders.cs | 1 +
.../Provider/LLMProvidersExtensions.cs | 1 +
.../Provider/Perplexity/ProviderPerplexity.cs | 157 ++++++++++++++++++
3 files changed, 159 insertions(+)
create mode 100644 app/MindWork AI Studio/Provider/Perplexity/ProviderPerplexity.cs
diff --git a/app/MindWork AI Studio/Provider/LLMProviders.cs b/app/MindWork AI Studio/Provider/LLMProviders.cs
index 118d68aa..fceb9b15 100644
--- a/app/MindWork AI Studio/Provider/LLMProviders.cs
+++ b/app/MindWork AI Studio/Provider/LLMProviders.cs
@@ -14,6 +14,7 @@ public enum LLMProviders
X = 8,
DEEP_SEEK = 11,
ALIBABA_CLOUD = 12,
+ PERPLEXITY = 14,
FIREWORKS = 5,
GROQ = 6,
diff --git a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
index 3dd9abe8..44d334b5 100644
--- a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
+++ b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
@@ -38,6 +38,7 @@ public static class LLMProvidersExtensions
LLMProviders.X => "xAI",
LLMProviders.DEEP_SEEK => "DeepSeek",
LLMProviders.ALIBABA_CLOUD => "Alibaba Cloud",
+ LLMProviders.PERPLEXITY => "Perplexity",
LLMProviders.GROQ => "Groq",
LLMProviders.FIREWORKS => "Fireworks.ai",
diff --git a/app/MindWork AI Studio/Provider/Perplexity/ProviderPerplexity.cs b/app/MindWork AI Studio/Provider/Perplexity/ProviderPerplexity.cs
new file mode 100644
index 00000000..369637be
--- /dev/null
+++ b/app/MindWork AI Studio/Provider/Perplexity/ProviderPerplexity.cs
@@ -0,0 +1,157 @@
+using System.Net.Http.Headers;
+using System.Runtime.CompilerServices;
+using System.Text;
+using System.Text.Json;
+
+using AIStudio.Chat;
+using AIStudio.Provider.OpenAI;
+using AIStudio.Settings;
+
+namespace AIStudio.Provider.Perplexity;
+
+public sealed class ProviderPerplexity(ILogger logger) : BaseProvider("https://api.perplexity.ai/", logger)
+{
+ #region Implementation of IProvider
+
+ /// <inheritdoc />
+ public override string Id => LLMProviders.PERPLEXITY.ToName();
+ 
+ /// <inheritdoc />
+ public override string InstanceName { get; set; } = "Perplexity";
+ 
+ /// <inheritdoc />
+ public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+ {
+ // Get the API key:
+ var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+ if(!requestedSecret.Success)
+ yield break;
+
+ // Prepare the system prompt:
+ var systemPrompt = new Message
+ {
+ Role = "system",
+ Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread, this.logger),
+ };
+
+ // Prepare the Perplexity HTTP chat request:
+ var perplexityChatRequest = JsonSerializer.Serialize(new ChatRequest
+ {
+ Model = chatModel.Id,
+
+ // Build the messages:
+ // - First of all the system prompt
+ // - Then non-empty user and AI messages
+ Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
+ {
+ Role = n.Role switch
+ {
+ ChatRole.USER => "user",
+ ChatRole.AI => "assistant",
+ ChatRole.AGENT => "assistant",
+ ChatRole.SYSTEM => "system",
+
+ _ => "user",
+ },
+
+ Content = n.Content switch
+ {
+ ContentText text => text.Text,
+ _ => string.Empty,
+ }
+ }).ToList()],
+ Stream = true,
+ }, JSON_SERIALIZER_OPTIONS);
+
+ async Task<HttpRequestMessage> RequestBuilder()
+ {
+ // Build the HTTP post request:
+ var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");
+
+ // Set the authorization header:
+ request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));
+
+ // Set the content:
+ request.Content = new StringContent(perplexityChatRequest, Encoding.UTF8, "application/json");
+ return request;
+ }
+
+ await foreach (var content in this.StreamChatCompletionInternal("Perplexity", RequestBuilder, token))
+ yield return content;
+ }
+
+ #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
+ /// <inheritdoc />
+ public override async IAsyncEnumerable<string> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default)
+ {
+ yield break;
+ }
+ #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
+
+ /// <inheritdoc />
+ public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
+ {
+ return this.LoadModels(token, apiKeyProvisional);
+ }
+
+ /// <inheritdoc />
+ public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
+ {
+ return Task.FromResult(Enumerable.Empty<Model>());
+ }
+
+ /// <inheritdoc />
+ public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
+ {
+ return Task.FromResult(Enumerable.Empty<Model>());
+ }
+
+ public override IReadOnlyCollection<Capability> GetModelCapabilities(Model model)
+ {
+ var modelName = model.Id.ToLowerInvariant().AsSpan();
+
+ if(modelName.IndexOf("reasoning") is not -1)
+ return
+ [
+ Capability.TEXT_INPUT,
+ Capability.TEXT_OUTPUT,
+
+ Capability.ALWAYS_REASONING,
+ ];
+
+ return
+ [
+ Capability.TEXT_INPUT,
+ Capability.TEXT_OUTPUT,
+ ];
+ }
+
+
+ #endregion
+
+ private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+ {
+ var secretKey = apiKeyProvisional switch
+ {
+ not null => apiKeyProvisional,
+ _ => await RUST_SERVICE.GetAPIKey(this) switch
+ {
+ { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
+ _ => null,
+ }
+ };
+
+ if (secretKey is null)
+ return [];
+
+ using var request = new HttpRequestMessage(HttpMethod.Get, "models");
+ request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
+
+ using var response = await this.httpClient.SendAsync(request, token);
+ if(!response.IsSuccessStatusCode)
+ return [];
+
+ var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
+ return modelResponse.Data;
+ }
+}
\ No newline at end of file