using System.Runtime.CompilerServices;

using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
using AIStudio.Settings;

namespace AIStudio.Provider.X;

/// <summary>
/// Provider for xAI (Grok models). xAI exposes an OpenAI-compatible
/// chat-completion API at https://api.x.ai/v1/, so the heavy lifting is
/// delegated to the OpenAI-compatible streaming helper in BaseProvider.
/// </summary>
public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai/v1/", LOGGER)
{
    // NOTE(review): the generic argument list was lost in extraction;
    // ILogger<ProviderX> / CreateLogger<ProviderX>() matches the standard
    // ILoggerFactory pattern — confirm against the original file.
    private static readonly ILogger<ProviderX> LOGGER = Program.LOGGER_FACTORY.CreateLogger<ProviderX>();

    #region Implementation of IProvider

    /// <inheritdoc/>
    public override string Id => LLMProviders.X.ToName();

    /// <inheritdoc/>
    public override string InstanceName { get; set; } = "xAI";

    /// <inheritdoc/>
    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {
        // Stream tokens through the shared OpenAI-compatible pipeline; the
        // factory lambda below builds the per-request payload.
        await foreach (var content in this.StreamOpenAICompatibleChatCompletion(
            "xAI", chatModel, chatThread, settingsManager,
            async (systemPrompt, apiParameters) =>
            {
                // Build the list of messages:
                var messages = await chatThread.Blocks.BuildMessagesUsingNestedImageUrlAsync(this.Provider, chatModel);
                return new ChatCompletionAPIRequest
                {
                    Model = chatModel.Id,

                    // Build the messages:
                    // - First of all the system prompt
                    // - Then none-empty user and AI messages
                    Messages = [systemPrompt, ..messages],

                    // Right now, we only support streaming completions:
                    Stream = true,

                    AdditionalApiParameters = apiParameters
                };
            }, token: token))
            yield return content;
    }

    #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
    /// <inheritdoc/>
    public override async IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default)
    {
        // Image generation is not implemented for this provider; yields nothing.
        // NOTE(review): element type reconstructed as ImageURL from the
        // referenceImageURL parameter type — confirm against IProvider.
        yield break;
    }
    #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously

    /// <inheritdoc/>
    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
    {
        // Audio transcription is not supported by this provider.
        return Task.FromResult(string.Empty);
    }

    /// <inheritdoc/>
    public override Task<IList<IEnumerable<float>>> EmbedTextAsync(Model embeddingModel, SettingsManager settingsManager, CancellationToken token = default, params List<string> texts)
    {
        // Embeddings are not supported by this provider; returns an empty list.
        // NOTE(review): return/params generic arguments reconstructed from the
        // garbled "Task>>" / "params List" — confirm against IProvider.
        return Task.FromResult<IList<IEnumerable<float>>>([]);
    }

    /// <inheritdoc/>
    public override async Task<ModelLoadResult> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        var result = await this.LoadModels(SecretStoreType.LLM_PROVIDER, ["grok-"], token, apiKeyProvisional);

        // Image-capable model ids (containing "-image") are excluded from the text-model list:
        return result with { Models = [..result.Models.Where(n => !n.Id.Contains("-image", StringComparison.OrdinalIgnoreCase))] };
    }

    /// <inheritdoc/>
    public override Task<ModelLoadResult> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        // No image models are offered for this provider.
        return Task.FromResult(ModelLoadResult.FromModels([]));
    }

    /// <inheritdoc/>
    public override Task<ModelLoadResult> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        // No embedding models are offered for this provider.
        return Task.FromResult(ModelLoadResult.FromModels([]));
    }

    /// <inheritdoc/>
    public override Task<ModelLoadResult> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        // No transcription models are offered for this provider.
        return Task.FromResult(ModelLoadResult.FromModels([]));
    }

    #endregion

    /// <summary>
    /// Loads the model list from the xAI "models" endpoint and keeps only
    /// models whose id starts with one of the given prefixes, plus the
    /// hard-coded "grok-2-latest" alias (not reported by the endpoint).
    /// </summary>
    /// <param name="storeType">The secret store from which the API key is read.</param>
    /// <param name="prefixes">Model-id prefixes to keep, e.g. "grok-".</param>
    /// <param name="token">Cancellation token forwarded to the HTTP request.</param>
    /// <param name="apiKeyProvisional">Optional not-yet-persisted API key.</param>
    private Task<ModelLoadResult> LoadModels(SecretStoreType storeType, string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
    {
        return this.LoadModelsResponse(
            storeType, "models",
            modelResponse => modelResponse.Data.Where(model => prefixes.Any(prefix => model.Id.StartsWith(prefix, StringComparison.InvariantCulture)))
                .Concat([
                    new Model
                    {
                        Id = "grok-2-latest",
                        DisplayName = "Grok 2.0 (latest)",
                    }
                ]),
            token, apiKeyProvisional);
    }
}