Added known Perplexity models

Thorsten Sommer 2025-08-30 11:26:56 +02:00
parent c2d58e05aa
commit 5574616160
Signed by: tsommer
GPG Key ID: 371BBA77A02C0108
3 changed files with 15 additions and 33 deletions

@@ -126,13 +126,13 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
     Id = this.DataId,
     InstanceName = this.DataInstanceName,
     UsedLLMProvider = this.DataLLMProvider,
     Model = this.DataLLMProvider switch
     {
-        LLMProviders.FIREWORKS => new Model(this.dataManuallyModel, null),
-        LLMProviders.HUGGINGFACE => new Model(this.dataManuallyModel, null),
-        LLMProviders.PERPLEXITY => new Model(this.dataManuallyModel, null),
+        LLMProviders.FIREWORKS or LLMProviders.HUGGINGFACE => new Model(this.dataManuallyModel, null),
         _ => this.DataModel
     },
     IsSelfHosted = this.DataLLMProvider is LLMProviders.SELF_HOSTED,
     IsEnterpriseConfiguration = false,
     Hostname = cleanedHostname.EndsWith('/') ? cleanedHostname[..^1] : cleanedHostname,
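
Side note on the syntax: the `or` pattern lets several enum values share a single switch arm, which is how the two remaining manual-entry providers collapse into one case while Perplexity now falls through to `_ => this.DataModel`. A minimal standalone sketch of the pattern (the enum and strings below are illustrative only, not taken from this repository):

    using System;

    enum Provider { Fireworks, HuggingFace, Perplexity }

    static class OrPatternDemo
    {
        static string EntryMode(Provider p) => p switch
        {
            // Two values share one arm via the `or` pattern:
            Provider.Fireworks or Provider.HuggingFace => "manual model name",
            // Everything else, including Perplexity, uses a model picked from a list:
            _ => "selected from known models",
        };

        static void Main() => Console.WriteLine(EntryMode(Provider.Perplexity));
    }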

@@ -250,7 +250,6 @@ public static class LLMProvidersExtensions
     {
         LLMProviders.FIREWORKS => "https://fireworks.ai/models?show=Serverless",
         LLMProviders.HUGGINGFACE => $"https://huggingface.co/models?inference_provider={inferenceProvider.EndpointsId()}",
-        LLMProviders.PERPLEXITY => "https://docs.perplexity.ai/api-reference/chat-completions-post#body-model",
         _ => string.Empty,
     };
@@ -258,7 +257,6 @@ public static class LLMProvidersExtensions
     {
         LLMProviders.FIREWORKS => true,
         LLMProviders.HUGGINGFACE => true,
-        LLMProviders.PERPLEXITY => true,
         _ => false,
     };

@@ -11,6 +11,15 @@ namespace AIStudio.Provider.Perplexity;
 public sealed class ProviderPerplexity(ILogger logger) : BaseProvider("https://api.perplexity.ai/", logger)
 {
+    private static readonly Model[] KNOWN_MODELS =
+    [
+        new("sonar", "Sonar"),
+        new("sonar-pro", "Sonar Pro"),
+        new("sonar-reasoning", "Sonar Reasoning"),
+        new("sonar-reasoning-pro", "Sonar Reasoning Pro"),
+        new("sonar-deep-research", "Sonar Deep Research"),
+    ];
+
     #region Implementation of IProvider

     /// <inheritdoc />
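
The five entries are Perplexity's Sonar family; each pairs the API model id with a human-readable display name. Syntactically, the field uses a C# 12 collection expression (`[...]`) together with target-typed `new(...)`, which infers the `Model` constructor from the array's element type. A tiny self-contained illustration (the `Model` record below is a stand-in for the repository's type, whose exact shape is an assumption):

    using System;

    // Stand-in record; the real Model type lives elsewhere in AIStudio.Provider.
    public sealed record Model(string Id, string? DisplayName);

    public static class KnownModelsDemo
    {
        // Collection expression plus target-typed `new`, mirroring the diff above.
        private static readonly Model[] KNOWN_MODELS =
        [
            new("sonar", "Sonar"),
            new("sonar-pro", "Sonar Pro"),
        ];

        public static void Main()
        {
            foreach (var model in KNOWN_MODELS)
                Console.WriteLine($"{model.Id} -> {model.DisplayName}");
        }
    }
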
@@ -91,7 +100,7 @@ public sealed class ProviderPerplexity(ILogger logger) : BaseProvider("https://a
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        return this.LoadModels(token, apiKeyProvisional);
+        return this.LoadModels();
     }

     /// <inheritdoc />
@@ -132,33 +141,8 @@ public sealed class ProviderPerplexity(ILogger logger) : BaseProvider("https://a
             Capability.IMAGE_OUTPUT,
         ];
     }

     #endregion

-    private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
-    {
-        var secretKey = apiKeyProvisional switch
-        {
-            not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
-            {
-                { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
-                _ => null,
-            }
-        };
-
-        if (secretKey is null)
-            return [];
-
-        using var request = new HttpRequestMessage(HttpMethod.Get, "models");
-        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);
-
-        using var response = await this.httpClient.SendAsync(request, token);
-        if (!response.IsSuccessStatusCode)
-            return [];
-
-        var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
-        return modelResponse.Data;
-    }
+    private Task<IEnumerable<Model>> LoadModels() => Task.FromResult<IEnumerable<Model>>(KNOWN_MODELS);
 }
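
The net effect: enumerating Perplexity models no longer requires an API key, secret decryption, or a request to the remote models endpoint; `LoadModels` simply wraps the static array in a completed task. A hedged usage sketch (how the provider is actually constructed and consumed elsewhere in AI Studio is an assumption here, not taken from this diff):

    // Hypothetical caller; `logger` is assumed to be an available ILogger instance.
    var perplexity = new ProviderPerplexity(logger);

    // Resolves from KNOWN_MODELS; no HTTP request and no API key are needed.
    var models = await perplexity.GetTextModels();

    foreach (var model in models)
        Console.WriteLine(model);   // prints the five Sonar entries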