First implementation of OpenRouter

Thorsten Sommer 2025-12-15 17:54:03 +01:00
parent cfad42ddf4
commit b8d85d1eea
Signed by: tsommer
GPG Key ID: 371BBA77A02C0108
5 changed files with 438 additions and 11 deletions

View File

@@ -15,7 +15,8 @@ public enum LLMProviders
DEEP_SEEK = 11,
ALIBABA_CLOUD = 12,
PERPLEXITY = 14,
OPEN_ROUTER = 15,
FIREWORKS = 5,
GROQ = 6,
HUGGINGFACE = 13,

View File

@ -9,6 +9,7 @@ using AIStudio.Provider.Helmholtz;
using AIStudio.Provider.HuggingFace;
using AIStudio.Provider.Mistral;
using AIStudio.Provider.OpenAI;
using AIStudio.Provider.OpenRouter;
using AIStudio.Provider.Perplexity;
using AIStudio.Provider.SelfHosted;
using AIStudio.Provider.X;
@@ -42,7 +43,8 @@ public static class LLMProvidersExtensions
LLMProviders.DEEP_SEEK => "DeepSeek",
LLMProviders.ALIBABA_CLOUD => "Alibaba Cloud",
LLMProviders.PERPLEXITY => "Perplexity",
LLMProviders.OPEN_ROUTER => "OpenRouter",
LLMProviders.GROQ => "Groq",
LLMProviders.FIREWORKS => "Fireworks.ai",
LLMProviders.HUGGINGFACE => "Hugging Face",
@@ -92,7 +94,9 @@ public static class LLMProvidersExtensions
LLMProviders.ALIBABA_CLOUD => Confidence.CHINA_NO_TRAINING.WithRegion("Asia").WithSources("https://www.alibabacloud.com/help/en/model-studio/support/faq-about-alibaba-cloud-model-studio").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
LLMProviders.PERPLEXITY => Confidence.USA_NO_TRAINING.WithRegion("America, U.S.").WithSources("https://www.perplexity.ai/hub/legal/perplexity-api-terms-of-service").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
LLMProviders.OPEN_ROUTER => Confidence.USA_HUB.WithRegion("America, U.S.").WithSources("https://openrouter.ai/privacy", "https://openrouter.ai/terms").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
LLMProviders.SELF_HOSTED => Confidence.SELF_HOSTED.WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
LLMProviders.HELMHOLTZ => Confidence.GDPR_NO_TRAINING.WithRegion("Europe, Germany").WithSources("https://helmholtz.cloud/services/?serviceID=d7d5c597-a2f6-4bd1-b71e-4d6499d98570").WithLevel(settingsManager.GetConfiguredConfidenceLevel(llmProvider)),
@@ -128,7 +132,8 @@ public static class LLMProvidersExtensions
LLMProviders.DEEP_SEEK => false,
LLMProviders.HUGGINGFACE => false,
LLMProviders.PERPLEXITY => false,
LLMProviders.OPEN_ROUTER => false,
//
// Self-hosted providers are treated as a special case anyway.
//
@@ -171,7 +176,8 @@ public static class LLMProvidersExtensions
LLMProviders.DEEP_SEEK => new ProviderDeepSeek { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.ALIBABA_CLOUD => new ProviderAlibabaCloud { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.PERPLEXITY => new ProviderPerplexity { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.OPEN_ROUTER => new ProviderOpenRouter { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.GROQ => new ProviderGroq { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.FIREWORKS => new ProviderFireworks { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
LLMProviders.HUGGINGFACE => new ProviderHuggingFace(inferenceProvider, model) { InstanceName = instanceName, AdditionalJsonApiParameters = expertProviderApiParameter },
@@ -201,7 +207,8 @@ public static class LLMProvidersExtensions
LLMProviders.DEEP_SEEK => "https://platform.deepseek.com/sign_up",
LLMProviders.ALIBABA_CLOUD => "https://account.alibabacloud.com/register/intl_register.htm",
LLMProviders.PERPLEXITY => "https://www.perplexity.ai/account/api",
LLMProviders.OPEN_ROUTER => "https://openrouter.ai/keys",
LLMProviders.GROQ => "https://console.groq.com/",
LLMProviders.FIREWORKS => "https://fireworks.ai/login",
LLMProviders.HUGGINGFACE => "https://huggingface.co/login",
@@ -224,8 +231,9 @@ public static class LLMProvidersExtensions
LLMProviders.DEEP_SEEK => "https://platform.deepseek.com/usage",
LLMProviders.ALIBABA_CLOUD => "https://usercenter2-intl.aliyun.com/billing",
LLMProviders.PERPLEXITY => "https://www.perplexity.ai/account/api/",
LLMProviders.OPEN_ROUTER => "https://openrouter.ai/activity",
LLMProviders.HUGGINGFACE => "https://huggingface.co/settings/billing",
_ => string.Empty,
};
@@ -241,8 +249,9 @@ public static class LLMProvidersExtensions
LLMProviders.DEEP_SEEK => true,
LLMProviders.ALIBABA_CLOUD => true,
LLMProviders.PERPLEXITY => true,
LLMProviders.OPEN_ROUTER => true,
LLMProviders.HUGGINGFACE => true,
_ => false,
};
@@ -288,7 +297,8 @@ public static class LLMProvidersExtensions
LLMProviders.DEEP_SEEK => true,
LLMProviders.ALIBABA_CLOUD => true,
LLMProviders.PERPLEXITY => true,
LLMProviders.OPEN_ROUTER => true,
LLMProviders.GROQ => true,
LLMProviders.FIREWORKS => true,
LLMProviders.HELMHOLTZ => true,
@@ -310,7 +320,8 @@ public static class LLMProvidersExtensions
LLMProviders.DEEP_SEEK => true,
LLMProviders.ALIBABA_CLOUD => true,
LLMProviders.PERPLEXITY => true,
LLMProviders.OPEN_ROUTER => true,
LLMProviders.GROQ => true,
LLMProviders.FIREWORKS => true,
LLMProviders.HELMHOLTZ => true,

View File

@@ -0,0 +1,165 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
using AIStudio.Settings;
namespace AIStudio.Provider.OpenRouter;
public sealed class ProviderOpenRouter() : BaseProvider("https://openrouter.ai/api/v1/", LOGGER)
{
private static readonly ILogger<ProviderOpenRouter> LOGGER = Program.LOGGER_FACTORY.CreateLogger<ProviderOpenRouter>();
#region Implementation of IProvider
/// <inheritdoc />
public override string Id => LLMProviders.OPEN_ROUTER.ToName();
/// <inheritdoc />
public override string InstanceName { get; set; } = "OpenRouter";
/// <inheritdoc />
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
if(!requestedSecret.Success)
yield break;
// Prepare the system prompt:
var systemPrompt = new Message
{
Role = "system",
Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
};
// Parse the API parameters:
var apiParameters = this.ParseAdditionalApiParameters();
// Define the custom header values used for project identification:
const string PROJECT_WEBSITE = "https://github.com/MindWorkAI/AI-Studio";
const string PROJECT_NAME = "MindWork AI Studio";
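// Note: OpenRouter documents "HTTP-Referer" and "X-Title" as optional request headers
// that attribute API traffic to the calling app (e.g. for app rankings on openrouter.ai).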
// Build the list of messages:
var messages = await chatThread.Blocks.BuildMessages(async n => new Message
{
Role = n.Role switch
{
ChatRole.USER => "user",
ChatRole.AI => "assistant",
ChatRole.AGENT => "assistant",
ChatRole.SYSTEM => "system",
_ => "user",
},
Content = n.Content switch
{
ContentText text => await text.PrepareContentForAI(),
_ => string.Empty,
}
});
// Prepare the OpenRouter HTTP chat request:
var openRouterChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
{
Model = chatModel.Id,
// Build the messages:
// - First of all the system prompt
// - Then non-empty user and AI messages
Messages = [systemPrompt, ..messages],
// Right now, we only support streaming completions:
Stream = true,
AdditionalApiParameters = apiParameters
}, JSON_SERIALIZER_OPTIONS);
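// Illustrative only: with the OpenAI-compatible serialization used here, the payload
// is roughly of this shape (exact field names depend on ChatCompletionAPIRequest and
// JSON_SERIALIZER_OPTIONS):
//
//   {
//     "model": "anthropic/claude-3.5-sonnet",
//     "messages": [ { "role": "system", "content": "..." }, { "role": "user", "content": "..." } ],
//     "stream": true
//   }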
async Task<HttpRequestMessage> RequestBuilder()
{
// Build the HTTP post request:
var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");
// Set the authorization header:
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));
// Set custom headers for project identification:
request.Headers.Add("HTTP-Referer", PROJECT_WEBSITE);
request.Headers.Add("X-Title", PROJECT_NAME);
// Set the content:
request.Content = new StringContent(openRouterChatRequest, Encoding.UTF8, "application/json");
return request;
}
await foreach (var content in this.StreamChatCompletionInternal<ChatCompletionDeltaStreamLine, NoChatCompletionAnnotationStreamLine>("OpenRouter", RequestBuilder, token))
yield return content;
}
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
/// <inheritdoc />
public override async IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default)
{
yield break;
}
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels(token, apiKeyProvisional);
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return Task.FromResult(Enumerable.Empty<Model>());
}
#endregion
private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
_ => await RUST_SERVICE.GetAPIKey(this) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
}
};
if (secretKey is null)
return [];
using var request = new HttpRequestMessage(HttpMethod.Get, "models");
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
using var response = await this.httpClient.SendAsync(request, token);
if(!response.IsSuccessStatusCode)
return [];
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
// Filter out non-text models (image, audio, embedding models)
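// (Illustrative: IDs such as "openai/whisper-1" or "openai/text-embedding-3-large"
// would be excluded by the keyword checks below.)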
return modelResponse.Data.Where(n =>
!n.Id.Contains("whisper", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("dall-e", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("tts", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("embedding", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("moderation", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("stable-diffusion", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("flux", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("midjourney", StringComparison.OrdinalIgnoreCase));
}
}

View File

@@ -0,0 +1,249 @@
using AIStudio.Provider;
namespace AIStudio.Settings;
public static partial class ProviderExtensions
{
public static List<Capability> GetModelCapabilitiesOpenRouter(Model model)
{
var modelName = model.Id.ToLowerInvariant().AsSpan();
//
// OpenRouter model IDs follow the pattern: "provider/model-name"
// Examples:
// - openai/gpt-4o
// - anthropic/claude-3-5-sonnet
// - google/gemini-pro-1.5
// - meta-llama/llama-3.1-405b-instruct
//
// We need to detect capabilities based on both provider and model name.
//
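// For example (illustrative, derived from the rules below):
//   "openai/gpt-4o"        -> text + image input, function calling
//   "openai/o3-mini"       -> text input/output, always reasoning
//   "deepseek/deepseek-r1" -> text input/output, always reasoning
//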
//
// OpenAI models via OpenRouter:
//
if (modelName.IndexOf("openai/") is not -1)
{
// Reasoning models (o1, o3, o4 series)
if (modelName.IndexOf("/o1") is not -1 ||
modelName.IndexOf("/o3") is not -1 ||
modelName.IndexOf("/o4") is not -1)
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.CHAT_COMPLETION_API,
];
// GPT-4o and GPT-5 series with multimodal
if (modelName.IndexOf("/gpt-4o") is not -1 ||
modelName.IndexOf("/gpt-5") is not -1 ||
modelName.IndexOf("/chatgpt-4o") is not -1)
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Standard GPT-4
if (modelName.IndexOf("/gpt-4") is not -1)
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// GPT-3.5
if (modelName.IndexOf("/gpt-3.5") is not -1 ||
modelName.IndexOf("/gpt-3") is not -1)
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
//
// Anthropic models via OpenRouter:
//
if (modelName.IndexOf("anthropic/") is not -1)
{
// Claude 3.5 and newer with vision
if (modelName.IndexOf("/claude-3.5") is not -1 ||
modelName.IndexOf("/claude-3-5") is not -1)
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Claude 3 Opus/Sonnet with vision
if (modelName.IndexOf("/claude-3-opus") is not -1 ||
modelName.IndexOf("/claude-3-sonnet") is not -1)
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Other Claude 3 models
if (modelName.IndexOf("/claude-3") is not -1)
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
//
// Google models via OpenRouter:
//
if (modelName.IndexOf("google/") is not -1)
{
// Gemini models with multimodal
if (modelName.IndexOf("/gemini") is not -1)
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
//
// xAI Grok models via OpenRouter:
//
if (modelName.IndexOf("x-ai/") is not -1 || modelName.IndexOf("/grok") is not -1)
{
if (modelName.IndexOf("-vision") is not -1)
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
//
// DeepSeek models via OpenRouter:
//
if (modelName.IndexOf("/deepseek") is not -1)
{
if (modelName.IndexOf("-r1") is not -1 || modelName.IndexOf(" r1") is not -1)
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.CHAT_COMPLETION_API,
];
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
//
// Mistral models via OpenRouter:
//
if (modelName.IndexOf("/mistral") is not -1 || modelName.IndexOf("/pixtral") is not -1)
{
if (modelName.IndexOf("/pixtral") is not -1)
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
}
//
// Meta Llama models via OpenRouter:
//
if (modelName.IndexOf("/llama") is not -1)
{
// Llama 4 with vision
if (modelName.IndexOf("/llama-4") is not -1 ||
modelName.IndexOf("/llama4") is not -1)
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Vision models
if (modelName.IndexOf("-vision") is not -1)
return [
Capability.TEXT_INPUT, Capability.MULTIPLE_IMAGE_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
// Llama 3.1+ with function calling
if (modelName.IndexOf("/llama-3.") is not -1 ||
modelName.IndexOf("/llama3.") is not -1)
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.FUNCTION_CALLING,
Capability.CHAT_COMPLETION_API,
];
// Default Llama
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
//
// Qwen models via OpenRouter:
//
if (modelName.IndexOf("/qwen") is not -1 || modelName.IndexOf("/qwq") is not -1)
{
if (modelName.IndexOf("/qwq") is not -1)
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.ALWAYS_REASONING,
Capability.CHAT_COMPLETION_API,
];
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
//
// Default for unknown models:
// Assume basic text input/output with chat completion
//
return [
Capability.TEXT_INPUT,
Capability.TEXT_OUTPUT,
Capability.CHAT_COMPLETION_API,
];
}
}

View File

@@ -14,7 +14,8 @@ public static partial class ProviderExtensions
LLMProviders.DEEP_SEEK => GetModelCapabilitiesDeepSeek(provider.Model),
LLMProviders.ALIBABA_CLOUD => GetModelCapabilitiesAlibaba(provider.Model),
LLMProviders.PERPLEXITY => GetModelCapabilitiesPerplexity(provider.Model),
LLMProviders.OPEN_ROUTER => GetModelCapabilitiesOpenRouter(provider.Model),
LLMProviders.GROQ => GetModelCapabilitiesOpenSource(provider.Model),
LLMProviders.FIREWORKS => GetModelCapabilitiesOpenSource(provider.Model),
LLMProviders.HUGGINGFACE => GetModelCapabilitiesOpenSource(provider.Model),