Mirror of https://github.com/MindWorkAI/AI-Studio.git (synced 2026-02-13 02:41:37 +00:00)
Support multiple SecretStoreType prefixes for API keys
Commit 9eed4d950e (parent: 96f316ac26)
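In short, this commit threads a new SecretStoreType through the RustService secret API so that LLM, embedding, transcription, and image providers store and look up their API keys under separate prefixes instead of a single shared one. A minimal sketch of the changed method signatures, taken from the diff below (bodies omitted):

    public async Task<RequestedSecret> GetAPIKey(ISecretId secretId, SecretStoreType storeType, bool isTrying = false)
    public async Task<StoreSecretResponse> SetAPIKey(ISecretId secretId, string key, SecretStoreType storeType)
    public async Task<DeleteSecretResponse> DeleteAPIKey(ISecretId secretId, SecretStoreType storeType)

Every caller in the diff now passes the store type that matches its provider kind, for example SecretStoreType.EMBEDDING_PROVIDER from the embedding settings panel and dialog.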
@@ -100,13 +100,13 @@ public partial class SettingsPanelEmbeddings : SettingsPanelBase
if (dialogResult is null || dialogResult.Canceled)
return;

-var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider);
+var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider, SecretStoreType.EMBEDDING_PROVIDER);
if(deleteSecretResponse.Success)
{
this.SettingsManager.ConfigurationData.EmbeddingProviders.Remove(provider);
await this.SettingsManager.StoreSettings();
}

await this.UpdateEmbeddingProviders();
await this.MessageBus.SendMessage<bool>(this, Event.CONFIGURATION_CHANGED);
}
@@ -107,7 +107,7 @@ public partial class SettingsPanelProviders : SettingsPanelBase
if (dialogResult is null || dialogResult.Canceled)
return;

-var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider);
+var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider, SecretStoreType.LLM_PROVIDER);
if(deleteSecretResponse.Success)
{
this.SettingsManager.ConfigurationData.Providers.Remove(provider);
@@ -100,13 +100,13 @@ public partial class SettingsPanelTranscription : SettingsPanelBase
if (dialogResult is null || dialogResult.Canceled)
return;

-var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider);
+var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider, SecretStoreType.TRANSCRIPTION_PROVIDER);
if(deleteSecretResponse.Success)
{
this.SettingsManager.ConfigurationData.TranscriptionProviders.Remove(provider);
await this.SettingsManager.StoreSettings();
}

await this.UpdateTranscriptionProviders();
await this.MessageBus.SendMessage<bool>(this, Event.CONFIGURATION_CHANGED);
}
@@ -167,7 +167,7 @@ public partial class EmbeddingProviderDialog : MSGComponentBase, ISecretId
}

// Load the API key:
-var requestedSecret = await this.RustService.GetAPIKey(this, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
+var requestedSecret = await this.RustService.GetAPIKey(this, SecretStoreType.EMBEDDING_PROVIDER, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
if (requestedSecret.Success)
this.dataAPIKey = await requestedSecret.Secret.Decrypt(this.encryption);
else

@@ -219,7 +219,7 @@ public partial class EmbeddingProviderDialog : MSGComponentBase, ISecretId
if (!string.IsNullOrWhiteSpace(this.dataAPIKey))
{
// Store the API key in the OS secure storage:
-var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey);
+var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey, SecretStoreType.EMBEDDING_PROVIDER);
if (!storeResponse.Success)
{
this.dataAPIKeyStorageIssue = string.Format(T("Failed to store the API key in the operating system. The message was: {0}. Please try again."), storeResponse.Issue);
@@ -180,7 +180,7 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
}

// Load the API key:
-var requestedSecret = await this.RustService.GetAPIKey(this, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
+var requestedSecret = await this.RustService.GetAPIKey(this, SecretStoreType.LLM_PROVIDER, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
if (requestedSecret.Success)
this.dataAPIKey = await requestedSecret.Secret.Decrypt(this.encryption);
else

@@ -233,7 +233,7 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
if (!string.IsNullOrWhiteSpace(this.dataAPIKey))
{
// Store the API key in the OS secure storage:
-var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey);
+var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey, SecretStoreType.LLM_PROVIDER);
if (!storeResponse.Success)
{
this.dataAPIKeyStorageIssue = string.Format(T("Failed to store the API key in the operating system. The message was: {0}. Please try again."), storeResponse.Issue);
@@ -175,7 +175,7 @@ public partial class TranscriptionProviderDialog : MSGComponentBase, ISecretId
}

// Load the API key:
-var requestedSecret = await this.RustService.GetAPIKey(this, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
+var requestedSecret = await this.RustService.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
if (requestedSecret.Success)
this.dataAPIKey = await requestedSecret.Secret.Decrypt(this.encryption);
else

@@ -227,7 +227,7 @@ public partial class TranscriptionProviderDialog : MSGComponentBase, ISecretId
if (!string.IsNullOrWhiteSpace(this.dataAPIKey))
{
// Store the API key in the OS secure storage:
-var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey);
+var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey, SecretStoreType.TRANSCRIPTION_PROVIDER);
if (!storeResponse.Success)
{
this.dataAPIKeyStorageIssue = string.Format(T("Failed to store the API key in the operating system. The message was: {0}. Please try again."), storeResponse.Issue);
@@ -25,7 +25,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -117,7 +117,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
new Model("qwen2.5-vl-3b-instruct", "Qwen2.5-VL 3b"),
};

-return this.LoadModels(["q"],token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
+return this.LoadModels(["q"], SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
}

/// <inheritdoc />

@@ -135,7 +135,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
new Model("text-embedding-v3", "text-embedding-v3"),
};

-return this.LoadModels(["text-embedding-"], token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
+return this.LoadModels(["text-embedding-"], SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
}

#region Overrides of BaseProvider

@@ -150,12 +150,12 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C

#endregion

-private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -23,7 +23,7 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -127,7 +127,7 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "
new Model("claude-3-opus-latest", "Claude 3 Opus (Latest)"),
};

-return this.LoadModels(token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
+return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
}

/// <inheritdoc />

@@ -150,12 +150,12 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "

#endregion

-private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -25,7 +25,7 @@ public sealed class ProviderDeepSeek() : BaseProvider(LLMProviders.DEEP_SEEK, "h
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -90,7 +90,7 @@ public sealed class ProviderDeepSeek() : BaseProvider(LLMProviders.DEEP_SEEK, "h
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-return this.LoadModels(token, apiKeyProvisional);
+return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
}

/// <inheritdoc />

@@ -113,12 +113,12 @@ public sealed class ProviderDeepSeek() : BaseProvider(LLMProviders.DEEP_SEEK, "h

#endregion

-private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -25,7 +25,7 @@ public class ProviderFireworks() : BaseProvider(LLMProviders.FIREWORKS, "https:/
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -85,7 +85,7 @@ public class ProviderFireworks() : BaseProvider(LLMProviders.FIREWORKS, "https:/
/// <inheritdoc />
public override async Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
{
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER);
return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, token: token);
}
@@ -25,7 +25,7 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -84,14 +84,14 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch
/// <inheritdoc />
public override async Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
{
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER);
return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, token: token);
}

/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var models = await this.LoadModels(token, apiKeyProvisional);
+var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return models.Where(model => !model.Id.StartsWith("e5-mistral-7b-instruct", StringComparison.InvariantCultureIgnoreCase));
}

@@ -104,7 +104,7 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var models = await this.LoadModels(token, apiKeyProvisional);
+var models = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
return models.Where(model => model.Id.StartsWith("e5-", StringComparison.InvariantCultureIgnoreCase));
}

@@ -121,12 +121,12 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch

#endregion

-private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -25,7 +25,7 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -91,7 +91,7 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
/// <inheritdoc />
public override async Task<IEnumerable<Provider.Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var modelResponse = await this.LoadModels(token, apiKeyProvisional);
+var modelResponse = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
if(modelResponse == default)
return [];

@@ -108,7 +108,7 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener

public override async Task<IEnumerable<Provider.Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var modelResponse = await this.LoadModels(token, apiKeyProvisional);
+var modelResponse = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
if(modelResponse == default)
return [];

@@ -126,12 +126,12 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener

#endregion

-private async Task<ModelsResponse> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+private async Task<ModelsResponse> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -25,7 +25,7 @@ public class ProviderGroq() : BaseProvider(LLMProviders.GROQ, "https://api.groq.
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -91,13 +91,13 @@ public class ProviderGroq() : BaseProvider(LLMProviders.GROQ, "https://api.groq.
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-return this.LoadModels(token, apiKeyProvisional);
+return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
}

/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-return Task.FromResult<IEnumerable<Model>>(Array.Empty<Model>());
+return Task.FromResult<IEnumerable<Model>>([]);
}

/// <inheritdoc />

@@ -114,12 +114,12 @@ public class ProviderGroq() : BaseProvider(LLMProviders.GROQ, "https://api.groq.

#endregion

-private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -25,7 +25,7 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -90,7 +90,7 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var models = await this.LoadModels(token, apiKeyProvisional);
+var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
return models.Where(model => !model.Id.StartsWith("text-", StringComparison.InvariantCultureIgnoreCase) &&
!model.Id.StartsWith("alias-embedding", StringComparison.InvariantCultureIgnoreCase));
}

@@ -104,7 +104,7 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var models = await this.LoadModels(token, apiKeyProvisional);
+var models = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
return models.Where(model =>
model.Id.StartsWith("alias-embedding", StringComparison.InvariantCultureIgnoreCase) ||
model.Id.StartsWith("text-", StringComparison.InvariantCultureIgnoreCase) ||

@@ -119,12 +119,12 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "

#endregion

-private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -30,7 +30,7 @@ public sealed class ProviderHuggingFace : BaseProvider
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;
@@ -23,7 +23,7 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -85,14 +85,14 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http
/// <inheritdoc />
public override async Task<string> TranscribeAudioAsync(Provider.Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
{
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER);
return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, token: token);
}

/// <inheritdoc />
public override async Task<IEnumerable<Provider.Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var modelResponse = await this.LoadModelList(apiKeyProvisional, token);
+var modelResponse = await this.LoadModelList(SecretStoreType.LLM_PROVIDER, apiKeyProvisional, token);
if(modelResponse == default)
return [];

@@ -106,7 +106,7 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http
/// <inheritdoc />
public override async Task<IEnumerable<Provider.Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var modelResponse = await this.LoadModelList(apiKeyProvisional, token);
+var modelResponse = await this.LoadModelList(SecretStoreType.EMBEDDING_PROVIDER, apiKeyProvisional, token);
if(modelResponse == default)
return [];

@@ -133,12 +133,12 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http

#endregion

-private async Task<ModelsResponse> LoadModelList(string? apiKeyProvisional, CancellationToken token)
+private async Task<ModelsResponse> LoadModelList(SecretStoreType storeType, string? apiKeyProvisional, CancellationToken token)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -27,7 +27,7 @@ public sealed class ProviderOpenAI() : BaseProvider(LLMProviders.OPEN_AI, "https
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -221,14 +221,14 @@ public sealed class ProviderOpenAI() : BaseProvider(LLMProviders.OPEN_AI, "https
/// <inheritdoc />
public override async Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
{
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER);
return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, token: token);
}

/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var models = await this.LoadModels(["chatgpt-", "gpt-", "o1-", "o3-", "o4-"], token, apiKeyProvisional);
+var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, ["chatgpt-", "gpt-", "o1-", "o3-", "o4-"], token, apiKeyProvisional);
return models.Where(model => !model.Id.Contains("image", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("realtime", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("audio", StringComparison.OrdinalIgnoreCase) &&

@@ -239,31 +239,31 @@ public sealed class ProviderOpenAI() : BaseProvider(LLMProviders.OPEN_AI, "https
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-return this.LoadModels(["dall-e-", "gpt-image"], token, apiKeyProvisional);
+return this.LoadModels(SecretStoreType.IMAGE_PROVIDER, ["dall-e-", "gpt-image"], token, apiKeyProvisional);
}

/// <inheritdoc />
public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-return this.LoadModels(["text-embedding-"], token, apiKeyProvisional);
+return this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, ["text-embedding-"], token, apiKeyProvisional);
}

/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var models = await this.LoadModels(["whisper-", "gpt-"], token, apiKeyProvisional);
+var models = await this.LoadModels(SecretStoreType.TRANSCRIPTION_PROVIDER, ["whisper-", "gpt-"], token, apiKeyProvisional);
return models.Where(model => model.Id.StartsWith("whisper-", StringComparison.InvariantCultureIgnoreCase) ||
model.Id.Contains("-transcribe", StringComparison.InvariantCultureIgnoreCase));
}

#endregion

-private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -28,7 +28,7 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -98,7 +98,7 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-return this.LoadModels(token, apiKeyProvisional);
+return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
}

/// <inheritdoc />

@@ -121,12 +121,12 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER

#endregion

-private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,

@@ -168,7 +168,7 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, SecretStoreType.EMBEDDING_PROVIDER) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -34,7 +34,7 @@ public sealed class ProviderPerplexity() : BaseProvider(LLMProviders.PERPLEXITY,
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;
@@ -26,7 +26,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this, isTrying: true);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER, isTrying: true);

// Prepare the system prompt:
var systemPrompt = new TextMessage

@@ -91,7 +91,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
/// <inheritdoc />
public override async Task<string> TranscribeAudioAsync(Provider.Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
{
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this, isTrying: true);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER, isTrying: true);
return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, host, token);
}

@@ -109,7 +109,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
case Host.LM_STUDIO:
case Host.OLLAMA:
case Host.VLLM:
-return await this.LoadModels(["embed"], [], token, apiKeyProvisional);
+return await this.LoadModels( SecretStoreType.LLM_PROVIDER, ["embed"], [], token, apiKeyProvisional);
}

return [];

@@ -136,7 +136,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
case Host.LM_STUDIO:
case Host.OLLAMA:
case Host.VLLM:
-return await this.LoadModels([], ["embed"], token, apiKeyProvisional);
+return await this.LoadModels( SecretStoreType.EMBEDDING_PROVIDER, [], ["embed"], token, apiKeyProvisional);
}

return [];

@@ -164,7 +164,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide

case Host.OLLAMA:
case Host.VLLM:
-return this.LoadModels([], [], token, apiKeyProvisional);
+return this.LoadModels(SecretStoreType.TRANSCRIPTION_PROVIDER, [], [], token, apiKeyProvisional);

default:
return Task.FromResult(Enumerable.Empty<Provider.Model>());

@@ -179,12 +179,12 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide

#endregion

-private async Task<IEnumerable<Provider.Model>> LoadModels(string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Provider.Model>> LoadModels(SecretStoreType storeType, string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this, isTrying: true) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType, isTrying: true) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
@@ -25,7 +25,7 @@ public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
{
// Get the API key:
-var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
if(!requestedSecret.Success)
yield break;

@@ -91,7 +91,7 @@ public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai
/// <inheritdoc />
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
-var models = await this.LoadModels(["grok-"], token, apiKeyProvisional);
+var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, ["grok-"], token, apiKeyProvisional);
return models.Where(n => !n.Id.Contains("-image", StringComparison.OrdinalIgnoreCase));
}

@@ -115,12 +115,12 @@ public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai

#endregion

-private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
+private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
{
var secretKey = apiKeyProvisional switch
{
not null => apiKeyProvisional,
-_ => await RUST_SERVICE.GetAPIKey(this) switch
+_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
{
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
_ => null,
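Across the provider classes the change follows one recurring pattern: the private model-loading helper gains a SecretStoreType parameter and forwards it to RUST_SERVICE.GetAPIKey, so the key lookup uses the prefix under which the key was stored. A condensed, hedged sketch of that pattern with the body abridged (the comments and the empty return are illustrative, not part of the diff):

    private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
    {
        // Prefer a provisional key (e.g., one the user just typed into a dialog);
        // otherwise fetch the stored key for this store type and decrypt it.
        var secretKey = apiKeyProvisional switch
        {
            not null => apiKeyProvisional,
            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
            {
                { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                _ => null,
            },
        };

        // ... query the provider's model endpoint with secretKey (omitted here) ...
        return [];
    }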
app/MindWork AI Studio/Tools/SecretStoreType.cs (new file, 32 lines)
@@ -0,0 +1,32 @@
+namespace AIStudio.Tools;
+
+/// <summary>
+/// Represents the type of secret store used for API keys.
+/// </summary>
+/// <remarks>
+/// Different provider types use different prefixes for storing API keys.
+/// This prevents collisions when the same instance name is used across
+/// different provider types (e.g., LLM, Embedding, Transcription).
+/// </remarks>
+public enum SecretStoreType
+{
+    /// <summary>
+    /// LLM provider secrets. Uses the legacy "provider::" prefix for backward compatibility.
+    /// </summary>
+    LLM_PROVIDER = 0,
+
+    /// <summary>
+    /// Embedding provider secrets. Uses the "embedding::" prefix.
+    /// </summary>
+    EMBEDDING_PROVIDER,
+
+    /// <summary>
+    /// Transcription provider secrets. Uses the "transcription::" prefix.
+    /// </summary>
+    TRANSCRIPTION_PROVIDER,
+
+    /// <summary>
+    /// Image provider secrets. Uses the "image::" prefix.
+    /// </summary>
+    IMAGE_PROVIDER,
+}
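As the remarks above state, the enum exists so that the same provider instance can hold one key per provider type without collisions. A hedged illustration of the resulting secret names, using the "{prefix}::{SecretId}::{SecretName}::api_key" format from the RustService changes further below; the SecretId "1a2b" and the SecretName "My OpenAI" are made up for the example:

    // LLM_PROVIDER (legacy prefix) -> "provider::1a2b::My OpenAI::api_key"
    // EMBEDDING_PROVIDER           -> "embedding::1a2b::My OpenAI::api_key"
    // TRANSCRIPTION_PROVIDER       -> "transcription::1a2b::My OpenAI::api_key"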
app/MindWork AI Studio/Tools/SecretStoreTypeExtensions.cs (new file, 21 lines)
@@ -0,0 +1,21 @@
+namespace AIStudio.Tools;
+
+public static class SecretStoreTypeExtensions
+{
+    /// <summary>
+    /// Gets the prefix string associated with the SecretStoreType.
+    /// </summary>
+    /// <remarks>
+    /// LLM_PROVIDER uses the legacy "provider" prefix for backward compatibility.
+    /// </remarks>
+    /// <param name="type">The SecretStoreType enum value.</param>
+    /// <returns>The corresponding prefix string.</returns>
+    public static string Prefix(this SecretStoreType type) => type switch
+    {
+        SecretStoreType.LLM_PROVIDER => "provider",
+        SecretStoreType.EMBEDDING_PROVIDER => "embedding",
+        SecretStoreType.TRANSCRIPTION_PROVIDER => "transcription",
+
+        _ => "provider",
+    };
+}
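A short usage sketch of the extension, mirroring how the RustService methods below build their secret requests (the local variable names are illustrative). Note that any value without an explicit case in the switch above, such as IMAGE_PROVIDER, falls through to the default "provider" prefix:

    var prefix = SecretStoreType.EMBEDDING_PROVIDER.Prefix();   // "embedding"
    var secretName = $"{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key";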
@@ -9,70 +9,76 @@ public sealed partial class RustService
/// </summary>
/// <param name="secretId">The secret ID to get the API key for.</param>
/// <param name="isTrying">Indicates if we are trying to get the API key. In that case, we don't log errors.</param>
+/// <param name="storeType">The secret store type. Defaults to LLM_PROVIDER for backward compatibility.</param>
/// <returns>The requested secret.</returns>
-public async Task<RequestedSecret> GetAPIKey(ISecretId secretId, bool isTrying = false)
+public async Task<RequestedSecret> GetAPIKey(ISecretId secretId, SecretStoreType storeType, bool isTrying = false)
{
-var secretRequest = new SelectSecretRequest($"provider::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, isTrying);
+var prefix = storeType.Prefix();
+var secretRequest = new SelectSecretRequest($"{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, isTrying);
var result = await this.http.PostAsJsonAsync("/secrets/get", secretRequest, this.jsonRustSerializerOptions);
if (!result.IsSuccessStatusCode)
{
if(!isTrying)
-this.logger!.LogError($"Failed to get the API key for 'provider::{secretId.SecretId}::{secretId.SecretName}::api_key' due to an API issue: '{result.StatusCode}'");
+this.logger!.LogError($"Failed to get the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key' due to an API issue: '{result.StatusCode}'");
return new RequestedSecret(false, new EncryptedText(string.Empty), TB("Failed to get the API key due to an API issue."));
}

var secret = await result.Content.ReadFromJsonAsync<RequestedSecret>(this.jsonRustSerializerOptions);
if (!secret.Success && !isTrying)
-this.logger!.LogError($"Failed to get the API key for 'provider::{secretId.SecretId}::{secretId.SecretName}::api_key': '{secret.Issue}'");
-
-this.logger!.LogDebug($"Successfully retrieved the API key for 'provider::{secretId.SecretId}::{secretId.SecretName}::api_key'.");
+this.logger!.LogError($"Failed to get the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key': '{secret.Issue}'");
+
+this.logger!.LogDebug($"Successfully retrieved the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key'.");
return secret;
}

/// <summary>
/// Try to store the API key for the given secret ID.
/// </summary>
/// <param name="secretId">The secret ID to store the API key for.</param>
/// <param name="key">The API key to store.</param>
+/// <param name="storeType">The secret store type. Defaults to LLM_PROVIDER for backward compatibility.</param>
/// <returns>The store secret response.</returns>
-public async Task<StoreSecretResponse> SetAPIKey(ISecretId secretId, string key)
+public async Task<StoreSecretResponse> SetAPIKey(ISecretId secretId, string key, SecretStoreType storeType)
{
+var prefix = storeType.Prefix();
var encryptedKey = await this.encryptor!.Encrypt(key);
-var request = new StoreSecretRequest($"provider::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, encryptedKey);
+var request = new StoreSecretRequest($"{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, encryptedKey);
var result = await this.http.PostAsJsonAsync("/secrets/store", request, this.jsonRustSerializerOptions);
if (!result.IsSuccessStatusCode)
{
-this.logger!.LogError($"Failed to store the API key for 'provider::{secretId.SecretId}::{secretId.SecretName}::api_key' due to an API issue: '{result.StatusCode}'");
+this.logger!.LogError($"Failed to store the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key' due to an API issue: '{result.StatusCode}'");
return new StoreSecretResponse(false, TB("Failed to get the API key due to an API issue."));
}

var state = await result.Content.ReadFromJsonAsync<StoreSecretResponse>(this.jsonRustSerializerOptions);
if (!state.Success)
-this.logger!.LogError($"Failed to store the API key for 'provider::{secretId.SecretId}::{secretId.SecretName}::api_key': '{state.Issue}'");
-
-this.logger!.LogDebug($"Successfully stored the API key for 'provider::{secretId.SecretId}::{secretId.SecretName}::api_key'.");
+this.logger!.LogError($"Failed to store the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key': '{state.Issue}'");
+
+this.logger!.LogDebug($"Successfully stored the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key'.");
return state;
}

/// <summary>
/// Tries to delete the API key for the given secret ID.
/// </summary>
/// <param name="secretId">The secret ID to delete the API key for.</param>
+/// <param name="storeType">The secret store type. Defaults to LLM_PROVIDER for backward compatibility.</param>
/// <returns>The delete secret response.</returns>
-public async Task<DeleteSecretResponse> DeleteAPIKey(ISecretId secretId)
+public async Task<DeleteSecretResponse> DeleteAPIKey(ISecretId secretId, SecretStoreType storeType)
{
-var request = new SelectSecretRequest($"provider::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, false);
+var prefix = storeType.Prefix();
+var request = new SelectSecretRequest($"{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, false);
var result = await this.http.PostAsJsonAsync("/secrets/delete", request, this.jsonRustSerializerOptions);
if (!result.IsSuccessStatusCode)
{
this.logger!.LogError($"Failed to delete the API key for secret ID '{secretId.SecretId}' due to an API issue: '{result.StatusCode}'");
return new DeleteSecretResponse{Success = false, WasEntryFound = false, Issue = TB("Failed to delete the API key due to an API issue.")};
}

var state = await result.Content.ReadFromJsonAsync<DeleteSecretResponse>(this.jsonRustSerializerOptions);
if (!state.Success)
this.logger!.LogError($"Failed to delete the API key for secret ID '{secretId.SecretId}': '{state.Issue}'");

return state;
}
}