using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using AIStudio.Chat;
using AIStudio.Provider.OpenAI;
using AIStudio.Settings;

namespace AIStudio.Provider.Google;

public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://generativelanguage.googleapis.com/v1beta/openai/", LOGGER)
{
    private static readonly ILogger<ProviderGoogle> LOGGER = Program.LOGGER_FACTORY.CreateLogger<ProviderGoogle>();

    #region Implementation of IProvider

    /// <inheritdoc />
    public override string Id => LLMProviders.GOOGLE.ToName();

    /// <inheritdoc />
    public override string InstanceName { get; set; } = "Google Gemini";

    /// <inheritdoc />
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {
        // Get the API key:
        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
        if (!requestedSecret.Success)
            yield break;

        // Prepare the system prompt:
        var systemPrompt = new TextMessage
        {
            Role = "system",
            Content = chatThread.PrepareSystemPrompt(settingsManager),
        };

        // Parse the API parameters:
        var apiParameters = this.ParseAdditionalApiParameters();

        // Build the list of messages:
        var messages = await chatThread.Blocks.BuildMessagesUsingNestedImageUrlAsync(this.Provider, chatModel);

        // Prepare the Google HTTP chat request:
        var geminiChatRequest = JsonSerializer.Serialize(new ChatRequest
        {
            Model = chatModel.Id,

            // Build the messages:
            // - First of all the system prompt
            // - Then non-empty user and AI messages
            Messages = [systemPrompt, ..messages],

            // Right now, we only support streaming completions:
            Stream = true,

            AdditionalApiParameters = apiParameters
        }, JSON_SERIALIZER_OPTIONS);

        async Task<HttpRequestMessage> RequestBuilder()
        {
            // Build the HTTP post request:
            var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");

            // Set the authorization header:
            request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));

            // Set the content:
            request.Content = new StringContent(geminiChatRequest, Encoding.UTF8, "application/json");
            return request;
        }

        await foreach (var content in this.StreamChatCompletionInternal<ChatCompletionDeltaStreamLine, NoChatCompletionAnnotationStreamLine>("Google", RequestBuilder, token))
            yield return content;
    }

    #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
    /// <inheritdoc />
    public override async IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default)
    {
        yield break;
    }
    #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously

    /// <inheritdoc />
    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
    {
        return Task.FromResult(string.Empty);
    }

    /// <inheritdoc />
    public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
        return models.Where(model =>
                model.Id.StartsWith("gemini-", StringComparison.OrdinalIgnoreCase) &&
                !this.IsEmbeddingModel(model.Id))
            .Select(this.WithDisplayNameFallback);
    }

    /// <inheritdoc />
    public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        return Task.FromResult(Enumerable.Empty<Model>());
    }
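
    /// <inheritdoc />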
    public override async Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        var models = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
        return models.Where(model => this.IsEmbeddingModel(model.Id))
            .Select(this.WithDisplayNameFallback);
    }

    /// <inheritdoc />
    public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        return Task.FromResult(Enumerable.Empty<Model>());
    }

    #endregion
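
    /// <summary>
    /// Loads the model list from the provider's OpenAI-compatible "models" endpoint.
    /// A provisional API key, when given, takes precedence over the key in the secret store;
    /// without any key, or when the request or parsing fails, an empty list is returned.
    /// </summary>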
    private async Task<IReadOnlyList<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
    {
        var secretKey = apiKeyProvisional switch
        {
            not null => apiKeyProvisional,
            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
            {
                { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                _ => null,
            }
        };

        if (string.IsNullOrWhiteSpace(secretKey))
            return [];

        using var request = new HttpRequestMessage(HttpMethod.Get, "models");
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", secretKey);

        using var response = await this.httpClient.SendAsync(request, token);
        if (!response.IsSuccessStatusCode)
        {
            LOGGER.LogError("Failed to load models with status code {ResponseStatusCode} and body: '{ResponseBody}'.", response.StatusCode, await response.Content.ReadAsStringAsync(token));
            return [];
        }

        try
        {
            var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
            if (modelResponse == default || modelResponse.Data.Count is 0)
            {
                LOGGER.LogError("Google model list response did not contain a valid data array.");
                return [];
            }

            return modelResponse.Data
                .Where(model => !string.IsNullOrWhiteSpace(model.Id))
                .Select(model => new Model(this.NormalizeModelId(model.Id), model.DisplayName))
                .ToArray();
        }
        catch (Exception e)
        {
            LOGGER.LogError("Failed to parse Google model list response: '{Message}'.", e.Message);
            return [];
        }
    }
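
    /// <summary>
    /// Heuristic check: treats any model id containing "embedding" or "embed" as an embedding model.
    /// </summary>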
    private bool IsEmbeddingModel(string modelId)
    {
        return modelId.Contains("embedding", StringComparison.OrdinalIgnoreCase) ||
               modelId.Contains("embed", StringComparison.OrdinalIgnoreCase);
    }
    private Model WithDisplayNameFallback(Model model)
    {
        return string.IsNullOrWhiteSpace(model.DisplayName)
            ? new Model(model.Id, model.Id)
            : model;
    }
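
    /// <summary>
    /// Strips a leading "models/" prefix from the model id, if present.
    /// </summary>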
    private string NormalizeModelId(string modelId)
    {
        return modelId.StartsWith("models/", StringComparison.OrdinalIgnoreCase)
            ? modelId["models/".Length..]
            : modelId;
    }
}