using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
using AIStudio.Settings;
namespace AIStudio.Provider.X;
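/// <summary>
/// The provider for the xAI (Grok) API. The chat endpoint follows the OpenAI chat completion
/// format, so the request and stream-line types from AIStudio.Provider.OpenAI are reused here.
/// </summary>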
public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai/v1/", LOGGER)
{
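    // Logger instance for this provider; it is also passed to the base class via the primary constructor above.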
    private static readonly ILogger<ProviderX> LOGGER = Program.LOGGER_FACTORY.CreateLogger<ProviderX>();

    #region Implementation of IProvider

    /// <inheritdoc />
    public override string Id => LLMProviders.X.ToName();

    /// <inheritdoc />
    public override string InstanceName { get; set; } = "xAI";

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {
        // Get the API key:
        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
        if(!requestedSecret.Success)
            yield break;

        // Prepare the system prompt:
        var systemPrompt = new TextMessage
        {
            Role = "system",
            Content = chatThread.PrepareSystemPrompt(settingsManager, chatThread),
        };

        // Parse the API parameters:
        var apiParameters = this.ParseAdditionalApiParameters();

        // Build the list of messages:
        var messages = await chatThread.Blocks.BuildMessagesUsingNestedImageUrlAsync(this.Provider, chatModel);

        // Prepare the xAI HTTP chat request:
        var xChatRequest = JsonSerializer.Serialize(new ChatCompletionAPIRequest
        {
            Model = chatModel.Id,

            // Build the messages:
            // - First of all, the system prompt
            // - Then the non-empty user and AI messages
            Messages = [systemPrompt, ..messages],

            // Right now, we only support streaming completions:
            Stream = true,
            AdditionalApiParameters = apiParameters
        }, JSON_SERIALIZER_OPTIONS);

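        // Local request factory; the base class calls it to create the HTTP request for the streaming call: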
        async Task<HttpRequestMessage> RequestBuilder()
        {
            // Build the HTTP post request:
            var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");

            // Set the authorization header:
            request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));

            // Set the content:
            request.Content = new StringContent(xChatRequest, Encoding.UTF8, "application/json");
            return request;
        }

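        // Stream the chat completion through the shared streaming logic of the base class: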
        await foreach (var content in this.StreamChatCompletionInternal<ChatCompletionDeltaStreamLine, NoChatCompletionAnnotationStreamLine>("xAI", RequestBuilder, token))
            yield return content;
    }

    #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
    /// <inheritdoc />
    public override async IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default)
    {
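        // Image generation is not supported by this provider implementation; the stream ends immediately: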
        yield break;
    }
    #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously

    /// <inheritdoc />
    public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        var models = await this.LoadModels(["grok-"], token, apiKeyProvisional);

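        // Exclude the image models; only text models are returned here: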
        return models.Where(n => !n.Id.Contains("-image", StringComparison.OrdinalIgnoreCase));
    }

    /// <inheritdoc />
    public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
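        // No image models are exposed by this provider: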
        return Task.FromResult<IEnumerable<Model>>([]);
    }

    /// <inheritdoc />
    public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
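        // Likewise, no embedding models are exposed: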
        return Task.FromResult<IEnumerable<Model>>([]);
    }

    #endregion

    private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
    {
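        // Use the provisional API key when one was handed in; otherwise, read and decrypt the stored secret: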
        var secretKey = apiKeyProvisional switch
        {
            not null => apiKeyProvisional,
            _ => await RUST_SERVICE.GetAPIKey(this) switch
            {
                { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                _ => null,
            }
        };

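        // Without any API key, we cannot query the available models: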
        if (secretKey is null)
            return [];

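        // Query the models endpoint using a bearer token: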
        using var request = new HttpRequestMessage(HttpMethod.Get, "models");
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);

        using var response = await this.httpClient.SendAsync(request, token);
        if(!response.IsSuccessStatusCode)
            return [];

        var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);

        //
        // The API does not return the alias model names, so we have to add them manually:
        // Right now, the only alias to add is `grok-2-latest`.
        //
        return modelResponse.Data.Where(model => prefixes.Any(prefix => model.Id.StartsWith(prefix, StringComparison.InvariantCulture)))
            .Concat([
                new Model
                {
                    Id = "grok-2-latest",
                    DisplayName = "Grok 2.0 (latest)",
                }
            ]);
    }
}