diff --git a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
index 0c692811..91dfba8e 100644
--- a/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
+++ b/app/MindWork AI Studio/Provider/LLMProvidersExtensions.cs
@@ -132,7 +132,7 @@ public static class LLMProvidersExtensions
LLMProviders.DEEP_SEEK => false,
LLMProviders.HUGGINGFACE => false,
LLMProviders.PERPLEXITY => false,
- LLMProviders.OPEN_ROUTER => false,
+ LLMProviders.OPEN_ROUTER => true,
//
// Self-hosted providers are treated as a special case anyway.
diff --git a/app/MindWork AI Studio/Provider/OpenRouter/ProviderOpenRouter.cs b/app/MindWork AI Studio/Provider/OpenRouter/ProviderOpenRouter.cs
index e2014adc..b0c70aa4 100644
--- a/app/MindWork AI Studio/Provider/OpenRouter/ProviderOpenRouter.cs
+++ b/app/MindWork AI Studio/Provider/OpenRouter/ProviderOpenRouter.cs
@@ -122,7 +122,7 @@ public sealed class ProviderOpenRouter() : BaseProvider("https://openrouter.ai/a
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
- return Task.FromResult(Enumerable.Empty<Model>());
+ return this.LoadEmbeddingModels(token, apiKeyProvisional);
}
#endregion
@@ -164,4 +164,35 @@ public sealed class ProviderOpenRouter() : BaseProvider("https://openrouter.ai/a
!n.Id.Contains("midjourney", StringComparison.OrdinalIgnoreCase))
.Select(n => new Model(n.Id, n.Name));
}
+
+ private async Task<IEnumerable<Model>> LoadEmbeddingModels(CancellationToken token, string? apiKeyProvisional = null)
+ {
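+ // Resolve the API key: prefer the provisional key (used while the provider is being configured); otherwise read the stored key from the Rust service and decrypt it.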
+ var secretKey = apiKeyProvisional switch
+ {
+ not null => apiKeyProvisional,
+ _ => await RUST_SERVICE.GetAPIKey(this) switch
+ {
+ { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
+ _ => null,
+ }
+ };
+
+ if (secretKey is null)
+ return [];
+
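+ // Query the embedding model listing endpoint relative to the provider's base URL, authenticating with a Bearer token.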
+ using var request = new HttpRequestMessage(HttpMethod.Get, "embeddings/models");
+ request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
+
+ using var response = await this.httpClient.SendAsync(request, token);
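+ // A non-success status code results in an empty model list rather than an error.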
+ if(!response.IsSuccessStatusCode)
+ return [];
+
+ var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
+
+ // Convert all embedding models to Model
+ return modelResponse.Data.Select(n => new Model(n.Id, n.Name));
+ }
}