Mirror of https://github.com/MindWorkAI/AI-Studio.git (synced 2026-02-12 03:41:38 +00:00)
Implemented the transcription API (#623)
This commit is contained in: parent b94614b0ab, commit 529986837e
@@ -2128,6 +2128,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T32678
 -- Actions
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T3865031940"] = "Actions"
 
+-- This embedding provider is managed by your organization.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T4062656589"] = "This embedding provider is managed by your organization."
+
 -- No embeddings configured yet.
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T4068015588"] = "No embeddings configured yet."
 

@@ -2287,15 +2290,18 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T40
 -- Configured Transcription Providers
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T4210863523"] = "Configured Transcription Providers"
 
+-- With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the 'Configure providers' section.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T584860404"] = "With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the 'Configure providers' section."
+
+-- This transcription provider is managed by your organization.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T756131076"] = "This transcription provider is managed by your organization."
+
 -- Open Dashboard
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T78223861"] = "Open Dashboard"
 
 -- Are you sure you want to delete the transcription provider '{0}'?
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T789660305"] = "Are you sure you want to delete the transcription provider '{0}'?"
 
--- With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the \"Configure providers\" section.
-UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T799338148"] = "With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the \\\"Configure providers\\\" section."
-
 -- Provider
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T900237532"] = "Provider"
 

@@ -2380,12 +2386,33 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VISION::T428040679"] = "Content creation"
 -- Useful assistants
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VISION::T586430036"] = "Useful assistants"
 
+-- Failed to create the transcription provider.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T1689988905"] = "Failed to create the transcription provider."
+
 -- Stop recording and start transcription
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T224155287"] = "Stop recording and start transcription"
 
 -- Start recording your voice for a transcription
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T2372624045"] = "Start recording your voice for a transcription"
 
+-- Transcription in progress...
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T2851219233"] = "Transcription in progress..."
+
+-- The configured transcription provider was not found.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T331613105"] = "The configured transcription provider was not found."
+
+-- The configured transcription provider does not meet the minimum confidence level.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T3834149033"] = "The configured transcription provider does not meet the minimum confidence level."
+
+-- An error occurred during transcription.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T588743762"] = "An error occurred during transcription."
+
+-- No transcription provider is configured.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T663630295"] = "No transcription provider is configured."
+
+-- The transcription result is empty.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T974954792"] = "The transcription result is empty."
+
 -- Are you sure you want to delete the chat '{0}' in the workspace '{1}'?
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::WORKSPACES::T1016188706"] = "Are you sure you want to delete the chat '{0}' in the workspace '{1}'?"
 
@@ -15,6 +15,6 @@
                       UserAttributes="@SPELLCHECK_ATTRIBUTES"/>
 
         <MudTooltip Text="@this.ToggleVisibilityTooltip">
-            <MudIconButton Icon="@this.InputTypeIcon" OnClick="() => this.ToggleVisibility()"/>
+            <MudIconButton Icon="@this.InputTypeIcon" OnClick="@(() => this.ToggleVisibility())"/>
         </MudTooltip>
     </MudStack>
@@ -100,13 +100,13 @@ public partial class SettingsPanelEmbeddings : SettingsPanelBase
         if (dialogResult is null || dialogResult.Canceled)
             return;
 
-        var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider);
+        var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider, SecretStoreType.EMBEDDING_PROVIDER);
         if(deleteSecretResponse.Success)
         {
             this.SettingsManager.ConfigurationData.EmbeddingProviders.Remove(provider);
             await this.SettingsManager.StoreSettings();
         }
 
         await this.UpdateEmbeddingProviders();
         await this.MessageBus.SendMessage<bool>(this, Event.CONFIGURATION_CHANGED);
     }
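A recurring change in this commit: the RustService secret calls (GetAPIKey, SetAPIKey, DeleteAPIKey) now take a store-type argument so that keys for embedding, LLM, and transcription providers live in separate scopes. The diff only shows the call sites, not the type itself; the sketch below merely illustrates the shape those call sites imply (member names are taken from the diff, everything else is an assumption).

// Sketch inferred from the call sites in this diff -- the real definition lives elsewhere
// in the repository and may carry additional members or metadata.
public enum SecretStoreType
{
    LLM_PROVIDER,
    EMBEDDING_PROVIDER,
    TRANSCRIPTION_PROVIDER,
}

Combined with the SecretId change further down (from DataId to DataLLMProvider.ToName()), this presumably keeps an LLM provider and a transcription provider that use the same vendor from overwriting each other's stored key.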
@@ -107,7 +107,7 @@ public partial class SettingsPanelProviders : SettingsPanelBase
         if (dialogResult is null || dialogResult.Canceled)
             return;
 
-        var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider);
+        var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider, SecretStoreType.LLM_PROVIDER);
         if(deleteSecretResponse.Success)
         {
             this.SettingsManager.ConfigurationData.Providers.Remove(provider);

@@ -10,7 +10,7 @@
     @T("Configured Transcription Providers")
 </MudText>
 <MudJustifiedText Typo="Typo.body1" Class="mb-3">
-    @T("With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the \"Configure providers\" section.")
+    @T("With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the 'Configure providers' section.")
 </MudJustifiedText>
 
 <MudTable Items="@this.SettingsManager.ConfigurationData.TranscriptionProviders" Hover="@true" Class="border-dashed border rounded-lg">

@@ -100,13 +100,13 @@ public partial class SettingsPanelTranscription : SettingsPanelBase
         if (dialogResult is null || dialogResult.Canceled)
             return;
 
-        var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider);
+        var deleteSecretResponse = await this.RustService.DeleteAPIKey(provider, SecretStoreType.TRANSCRIPTION_PROVIDER);
         if(deleteSecretResponse.Success)
        {
             this.SettingsManager.ConfigurationData.TranscriptionProviders.Remove(provider);
             await this.SettingsManager.StoreSettings();
         }
 
         await this.UpdateTranscriptionProviders();
         await this.MessageBus.SendMessage<bool>(this, Event.CONFIGURATION_CHANGED);
     }
@@ -6,11 +6,18 @@
 @if (PreviewFeatures.PRE_SPEECH_TO_TEXT_2026.IsEnabled(this.SettingsManager) && !string.IsNullOrWhiteSpace(this.SettingsManager.ConfigurationData.App.UseTranscriptionProvider))
 {
     <MudTooltip Text="@this.Tooltip">
-        <MudToggleIconButton Toggled="@this.isRecording"
-                             ToggledChanged="@this.OnRecordingToggled"
-                             Icon="@Icons.Material.Filled.Mic"
-                             ToggledIcon="@Icons.Material.Filled.Stop"
-                             Color="Color.Primary"
-                             ToggledColor="Color.Error"/>
+        @if (this.isTranscribing)
+        {
+            <MudProgressCircular Size="Size.Small" Indeterminate="true" Color="Color.Primary"/>
+        }
+        else
+        {
+            <MudToggleIconButton Toggled="@this.isRecording"
+                                 ToggledChanged="@this.OnRecordingToggled"
+                                 Icon="@Icons.Material.Filled.Mic"
+                                 ToggledIcon="@Icons.Material.Filled.Stop"
+                                 Color="Color.Primary"
+                                 ToggledColor="Color.Error"/>
+        }
     </MudTooltip>
 }

@@ -1,3 +1,4 @@
+using AIStudio.Provider;
 using AIStudio.Tools.MIME;
 using AIStudio.Tools.Services;
 

@@ -9,21 +10,30 @@ public partial class VoiceRecorder : MSGComponentBase
 {
     [Inject]
     private ILogger<VoiceRecorder> Logger { get; init; } = null!;
 
     [Inject]
     private IJSRuntime JsRuntime { get; init; } = null!;
 
     [Inject]
     private RustService RustService { get; init; } = null!;
 
+    [Inject]
+    private ISnackbar Snackbar { get; init; } = null!;
+
     private uint numReceivedChunks;
     private bool isRecording;
+    private bool isTranscribing;
     private FileStream? currentRecordingStream;
     private string? currentRecordingPath;
     private string? currentRecordingMimeType;
+    private string? finalRecordingPath;
     private DotNetObjectReference<VoiceRecorder>? dotNetReference;
 
-    private string Tooltip => this.isRecording ? T("Stop recording and start transcription") : T("Start recording your voice for a transcription");
+    private string Tooltip => this.isTranscribing
+        ? T("Transcription in progress...")
+        : this.isRecording
+            ? T("Stop recording and start transcription")
+            : T("Start recording your voice for a transcription");
 
     private async Task OnRecordingToggled(bool toggled)
     {

@@ -66,6 +76,10 @@ public partial class VoiceRecorder : MSGComponentBase
 
             this.isRecording = false;
             this.StateHasChanged();
+
+            // Start transcription if we have a recording and a configured provider:
+            if (this.finalRecordingPath is not null)
+                await this.TranscribeRecordingAsync();
         }
     }
 

@@ -127,6 +141,7 @@ public partial class VoiceRecorder : MSGComponentBase
 
     private async Task FinalizeRecordingStream()
     {
+        this.finalRecordingPath = null;
        if (this.currentRecordingStream is not null)
         {
             await this.currentRecordingStream.FlushAsync();

@@ -142,6 +157,7 @@ public partial class VoiceRecorder : MSGComponentBase
         if (File.Exists(this.currentRecordingPath))
         {
             File.Move(this.currentRecordingPath, newPath, overwrite: true);
+            this.finalRecordingPath = newPath;
             this.Logger.LogInformation("Finalized audio recording over {NumChunks} streamed audio chunks to the file '{RecordingPath}'.", this.numReceivedChunks, newPath);
         }
     }
@@ -170,6 +186,114 @@ public partial class VoiceRecorder : MSGComponentBase
         };
     }
 
+    private async Task TranscribeRecordingAsync()
+    {
+        if (this.finalRecordingPath is null)
+            return;
+
+        this.isTranscribing = true;
+        this.StateHasChanged();
+
+        try
+        {
+            // Get the configured transcription provider ID:
+            var transcriptionProviderId = this.SettingsManager.ConfigurationData.App.UseTranscriptionProvider;
+            if (string.IsNullOrWhiteSpace(transcriptionProviderId))
+            {
+                this.Logger.LogWarning("No transcription provider is configured.");
+                await this.MessageBus.SendError(new(Icons.Material.Filled.VoiceChat, this.T("No transcription provider is configured.")));
+                return;
+            }
+
+            // Find the transcription provider in the list of configured providers:
+            var transcriptionProviderSettings = this.SettingsManager.ConfigurationData.TranscriptionProviders
+                .FirstOrDefault(x => x.Id == transcriptionProviderId);
+
+            if (transcriptionProviderSettings is null)
+            {
+                this.Logger.LogWarning("The configured transcription provider with ID '{ProviderId}' was not found.", transcriptionProviderId);
+                await this.MessageBus.SendError(new(Icons.Material.Filled.VoiceChat, this.T("The configured transcription provider was not found.")));
+                return;
+            }
+
+            // Check the confidence level:
+            var minimumLevel = this.SettingsManager.GetMinimumConfidenceLevel(Tools.Components.NONE);
+            var providerConfidence = transcriptionProviderSettings.UsedLLMProvider.GetConfidence(this.SettingsManager);
+            if (providerConfidence.Level < minimumLevel)
+            {
+                this.Logger.LogWarning(
+                    "The configured transcription provider '{ProviderName}' has a confidence level of '{ProviderLevel}', which is below the minimum required level of '{MinimumLevel}'.",
+                    transcriptionProviderSettings.Name,
+                    providerConfidence.Level,
+                    minimumLevel);
+                await this.MessageBus.SendError(new(Icons.Material.Filled.VoiceChat, this.T("The configured transcription provider does not meet the minimum confidence level.")));
+                return;
+            }
+
+            // Create the provider instance:
+            var provider = transcriptionProviderSettings.CreateProvider();
+            if (provider.Provider is LLMProviders.NONE)
+            {
+                this.Logger.LogError("Failed to create the transcription provider instance.");
+                await this.MessageBus.SendError(new(Icons.Material.Filled.VoiceChat, this.T("Failed to create the transcription provider.")));
+                return;
+            }
+
+            // Call the transcription API:
+            this.Logger.LogInformation("Starting transcription with provider '{ProviderName}' and model '{ModelName}'.", transcriptionProviderSettings.Name, transcriptionProviderSettings.Model.DisplayName);
+            var transcribedText = await provider.TranscribeAudioAsync(transcriptionProviderSettings.Model, this.finalRecordingPath, this.SettingsManager);
+
+            if (string.IsNullOrWhiteSpace(transcribedText))
+            {
+                this.Logger.LogWarning("The transcription result is empty.");
+                await this.MessageBus.SendWarning(new(Icons.Material.Filled.VoiceChat, this.T("The transcription result is empty.")));
+                return;
+            }
+
+            // Remove trailing and leading whitespace:
+            transcribedText = transcribedText.Trim();
+
+            // Replace line breaks with spaces:
+            transcribedText = transcribedText.Replace("\r", " ").Replace("\n", " ");
+
+            // Replace two spaces with a single space:
+            transcribedText = transcribedText.Replace("  ", " ");
+
+            this.Logger.LogInformation("Transcription completed successfully. Result length: {Length} characters.", transcribedText.Length);
+
+            // Play the transcription done sound effect:
+            await this.JsRuntime.InvokeVoidAsync("playSound", "/sounds/transcription_done.ogg");
+
+            // Copy the transcribed text to the clipboard:
+            await this.RustService.CopyText2Clipboard(this.Snackbar, transcribedText);
+
+            // Delete the recording file:
+            try
+            {
+                if (File.Exists(this.finalRecordingPath))
+                {
+                    File.Delete(this.finalRecordingPath);
+                    this.Logger.LogInformation("Deleted the recording file '{RecordingPath}'.", this.finalRecordingPath);
+                }
+            }
+            catch (Exception ex)
+            {
+                this.Logger.LogError(ex, "Failed to delete the recording file '{RecordingPath}'.", this.finalRecordingPath);
+            }
+        }
+        catch (Exception ex)
+        {
+            this.Logger.LogError(ex, "An error occurred during transcription.");
+            await this.MessageBus.SendError(new(Icons.Material.Filled.VoiceChat, this.T("An error occurred during transcription.")));
+        }
+        finally
+        {
+            this.finalRecordingPath = null;
+            this.isTranscribing = false;
+            this.StateHasChanged();
+        }
+    }
+
     private sealed class AudioRecordingResult
     {
         public string MimeType { get; init; } = string.Empty;
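One note on the cleanup above: a single Replace("  ", " ") pass halves runs of spaces but does not fully collapse runs of three or more. If complete normalization is ever needed, a regex does it in one step; this is an editor's sketch, not part of the commit.

using System.Text.RegularExpressions;

// Collapse any whitespace run (spaces, tabs, line breaks) into a single space.
public static class TranscriptNormalizer
{
    public static string Normalize(string text) => Regex.Replace(text.Trim(), @"\s+", " ");
}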
@@ -25,7 +25,7 @@
 
 @if (this.DataLLMProvider.IsAPIKeyNeeded(this.DataHost))
 {
-    <SecretInputField @bind-Secret="@this.dataAPIKey" Label="@this.APIKeyText" Validation="@this.providerValidation.ValidatingAPIKey"/>
+    <SecretInputField Secret="@this.dataAPIKey" SecretChanged="@this.OnAPIKeyChanged" Label="@this.APIKeyText" Validation="@this.providerValidation.ValidatingAPIKey"/>
 }
 
 @if (this.DataLLMProvider.IsHostnameNeeded())

@@ -138,6 +138,9 @@ public partial class EmbeddingProviderDialog : MSGComponentBase, ISecretId
 
     protected override async Task OnInitializedAsync()
     {
+        // Call the base initialization first so that the I18N is ready:
+        await base.OnInitializedAsync();
+
         // Configure the spellchecking for the instance name input:
         this.SettingsManager.InjectSpellchecking(SPELLCHECK_ATTRIBUTES);
 

@@ -164,7 +167,7 @@ public partial class EmbeddingProviderDialog : MSGComponentBase, ISecretId
         }
 
         // Load the API key:
-        var requestedSecret = await this.RustService.GetAPIKey(this, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
+        var requestedSecret = await this.RustService.GetAPIKey(this, SecretStoreType.EMBEDDING_PROVIDER, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
         if (requestedSecret.Success)
             this.dataAPIKey = await requestedSecret.Secret.Decrypt(this.encryption);
         else

@@ -179,8 +182,6 @@ public partial class EmbeddingProviderDialog : MSGComponentBase, ISecretId
 
             await this.ReloadModels();
         }
-
-        await base.OnInitializedAsync();
     }
 
     protected override async Task OnAfterRenderAsync(bool firstRender)
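Together with the earlier hunk in OnInitializedAsync, this moves the base call from the end of the method to its beginning, matching the new comment that the I18N layer must be ready first. A generic, self-contained sketch of the pattern (class name hypothetical, details elided):

using Microsoft.AspNetCore.Components;

// Hypothetical component illustrating the initialization order the dialogs now use.
public class LocalizedDialogSketch : ComponentBase
{
    protected override async Task OnInitializedAsync()
    {
        // Run the base initialization first so that whatever it prepares
        // (here: the I18N layer behind T(...)) is available below.
        await base.OnInitializedAsync();

        // ...dialog-specific setup: spellchecking, loading the API key, model list...
    }
}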
@@ -197,7 +198,7 @@ public partial class EmbeddingProviderDialog : MSGComponentBase, ISecretId
 
     #region Implementation of ISecretId
 
-    public string SecretId => this.DataId;
+    public string SecretId => this.DataLLMProvider.ToName();
 
     public string SecretName => this.DataName;
 

@@ -218,7 +219,7 @@ public partial class EmbeddingProviderDialog : MSGComponentBase, ISecretId
         if (!string.IsNullOrWhiteSpace(this.dataAPIKey))
         {
             // Store the API key in the OS secure storage:
-            var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey);
+            var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey, SecretStoreType.EMBEDDING_PROVIDER);
             if (!storeResponse.Success)
             {
                 this.dataAPIKeyStorageIssue = string.Format(T("Failed to store the API key in the operating system. The message was: {0}. Please try again."), storeResponse.Issue);

@@ -239,6 +240,16 @@ public partial class EmbeddingProviderDialog : MSGComponentBase, ISecretId
     }
 
     private void Cancel() => this.MudDialog.Cancel();
 
+    private async Task OnAPIKeyChanged(string apiKey)
+    {
+        this.dataAPIKey = apiKey;
+        if (!string.IsNullOrWhiteSpace(this.dataAPIKeyStorageIssue))
+        {
+            this.dataAPIKeyStorageIssue = string.Empty;
+            await this.form.Validate();
+        }
+    }
+
     private async Task ReloadModels()
     {
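All three dialogs swap @bind-Secret for an explicit Secret/SecretChanged pair so that the new OnAPIKeyChanged handler can also clear a stale storage error and re-validate the form on every keystroke. For that to bind, SecretInputField has to expose roughly the following parameters; this is an assumed sketch of the component contract, not code from the commit.

using Microsoft.AspNetCore.Components;

// Assumed parameter surface of SecretInputField (illustration only):
public partial class SecretInputField : ComponentBase
{
    [Parameter]
    public string Secret { get; set; } = string.Empty;

    // @bind-Secret would wire this callback automatically; the dialogs now
    // subscribe to it manually so they can run extra logic on every change.
    [Parameter]
    public EventCallback<string> SecretChanged { get; set; }
}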
@@ -22,7 +22,7 @@
 
 @if (this.DataLLMProvider.IsAPIKeyNeeded(this.DataHost))
 {
-    <SecretInputField @bind-Secret="@this.dataAPIKey" Label="@this.APIKeyText" Validation="@this.providerValidation.ValidatingAPIKey"/>
+    <SecretInputField Secret="@this.dataAPIKey" SecretChanged="@this.OnAPIKeyChanged" Label="@this.APIKeyText" Validation="@this.providerValidation.ValidatingAPIKey"/>
 }
 
 @if (this.DataLLMProvider.IsHostnameNeeded())

@@ -147,6 +147,9 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
 
     protected override async Task OnInitializedAsync()
     {
+        // Call the base initialization first so that the I18N is ready:
+        await base.OnInitializedAsync();
+
         // Configure the spellchecking for the instance name input:
         this.SettingsManager.InjectSpellchecking(SPELLCHECK_ATTRIBUTES);
 

@@ -177,7 +180,7 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
         }
 
         // Load the API key:
-        var requestedSecret = await this.RustService.GetAPIKey(this, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
+        var requestedSecret = await this.RustService.GetAPIKey(this, SecretStoreType.LLM_PROVIDER, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
         if (requestedSecret.Success)
             this.dataAPIKey = await requestedSecret.Secret.Decrypt(this.encryption);
         else

@@ -192,8 +195,6 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
 
             await this.ReloadModels();
         }
-
-        await base.OnInitializedAsync();
     }
 
     protected override async Task OnAfterRenderAsync(bool firstRender)

@@ -232,7 +233,7 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
         if (!string.IsNullOrWhiteSpace(this.dataAPIKey))
         {
             // Store the API key in the OS secure storage:
-            var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey);
+            var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey, SecretStoreType.LLM_PROVIDER);
             if (!storeResponse.Success)
             {
                 this.dataAPIKeyStorageIssue = string.Format(T("Failed to store the API key in the operating system. The message was: {0}. Please try again."), storeResponse.Issue);

@@ -253,6 +254,16 @@ public partial class ProviderDialog : MSGComponentBase, ISecretId
     }
 
     private void Cancel() => this.MudDialog.Cancel();
 
+    private async Task OnAPIKeyChanged(string apiKey)
+    {
+        this.dataAPIKey = apiKey;
+        if (!string.IsNullOrWhiteSpace(this.dataAPIKeyStorageIssue))
+        {
+            this.dataAPIKeyStorageIssue = string.Empty;
+            await this.form.Validate();
+        }
+    }
+
     private async Task ReloadModels()
     {
@@ -25,7 +25,7 @@
 
 @if (this.DataLLMProvider.IsAPIKeyNeeded(this.DataHost))
 {
-    <SecretInputField @bind-Secret="@this.dataAPIKey" Label="@this.APIKeyText" Validation="@this.providerValidation.ValidatingAPIKey"/>
+    <SecretInputField Secret="@this.dataAPIKey" SecretChanged="@this.OnAPIKeyChanged" Label="@this.APIKeyText" Validation="@this.providerValidation.ValidatingAPIKey"/>
 }
 
 @if (this.DataLLMProvider.IsHostnameNeeded())

@@ -146,6 +146,9 @@ public partial class TranscriptionProviderDialog : MSGComponentBase, ISecretId
 
     protected override async Task OnInitializedAsync()
     {
+        // Call the base initialization first so that the I18N is ready:
+        await base.OnInitializedAsync();
+
         // Configure the spellchecking for the instance name input:
         this.SettingsManager.InjectSpellchecking(SPELLCHECK_ATTRIBUTES);
 

@@ -172,7 +175,7 @@ public partial class TranscriptionProviderDialog : MSGComponentBase, ISecretId
         }
 
         // Load the API key:
-        var requestedSecret = await this.RustService.GetAPIKey(this, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
+        var requestedSecret = await this.RustService.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER, isTrying: this.DataLLMProvider is LLMProviders.SELF_HOSTED);
         if (requestedSecret.Success)
             this.dataAPIKey = await requestedSecret.Secret.Decrypt(this.encryption);
         else

@@ -187,8 +190,6 @@ public partial class TranscriptionProviderDialog : MSGComponentBase, ISecretId
 
             await this.ReloadModels();
         }
-
-        await base.OnInitializedAsync();
     }
 
     protected override async Task OnAfterRenderAsync(bool firstRender)

@@ -205,7 +206,7 @@ public partial class TranscriptionProviderDialog : MSGComponentBase, ISecretId
 
     #region Implementation of ISecretId
 
-    public string SecretId => this.DataId;
+    public string SecretId => this.DataLLMProvider.ToName();
 
     public string SecretName => this.DataName;
 

@@ -226,7 +227,7 @@ public partial class TranscriptionProviderDialog : MSGComponentBase, ISecretId
         if (!string.IsNullOrWhiteSpace(this.dataAPIKey))
         {
             // Store the API key in the OS secure storage:
-            var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey);
+            var storeResponse = await this.RustService.SetAPIKey(this, this.dataAPIKey, SecretStoreType.TRANSCRIPTION_PROVIDER);
             if (!storeResponse.Success)
             {
                 this.dataAPIKeyStorageIssue = string.Format(T("Failed to store the API key in the operating system. The message was: {0}. Please try again."), storeResponse.Issue);

@@ -247,6 +248,16 @@ public partial class TranscriptionProviderDialog : MSGComponentBase, ISecretId
     }
 
     private void Cancel() => this.MudDialog.Cancel();
 
+    private async Task OnAPIKeyChanged(string apiKey)
+    {
+        this.dataAPIKey = apiKey;
+        if (!string.IsNullOrWhiteSpace(this.dataAPIKeyStorageIssue))
+        {
+            this.dataAPIKeyStorageIssue = string.Empty;
+            await this.form.Validate();
+        }
+    }
+
     private async Task ReloadModels()
     {
@@ -75,7 +75,7 @@ CONFIG["TRANSCRIPTION_PROVIDERS"] = {}
 
 -- An example of a transcription provider configuration:
 -- CONFIG["TRANSCRIPTION_PROVIDERS"][#CONFIG["TRANSCRIPTION_PROVIDERS"]+1] = {
---     ["Id"] = "00000000-0000-0000-0000-000000000001",
+--     ["Id"] = "00000000-0000-0000-0000-000000000000",
 --     ["Name"] = "<user-friendly name for the transcription provider>",
 --     ["UsedLLMProvider"] = "SELF_HOSTED",
 --
@@ -2130,6 +2130,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T32678
 -- Actions
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T3865031940"] = "Aktionen"
 
+-- This embedding provider is managed by your organization.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T4062656589"] = "Dieser Einbettungsanbieter wird von Ihrer Organisation verwaltet."
+
 -- No embeddings configured yet.
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T4068015588"] = "Es wurden bislang keine Einbettungen konfiguriert."
 

@@ -2289,6 +2292,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T40
 -- Configured Transcription Providers
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T4210863523"] = "Konfigurierte Anbieter für Transkriptionen"
 
+-- This transcription provider is managed by your organization.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T756131076"] = "Dieser Anbieter für Transkriptionen wird von Ihrer Organisation verwaltet."
+
 -- Open Dashboard
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T78223861"] = "Dashboard öffnen"
 

@@ -2296,7 +2302,7 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T78
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T789660305"] = "Möchten Sie den Anbieter für Transkriptionen „{0}“ wirklich löschen?"
 
 -- With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the \"Configure providers\" section.
-UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T799338148"] = "Mit Unterstützung von Modellen für Transkriptionen kann MindWork AI Studio menschliche Sprache in Text umwandeln. Das ist zum Beispiel hilfreich, wenn Sie Texte diktieren möchten. Sie können aus speziellen Modellen für Transkriptionen wählen, jedoch nicht aus multimodalen LLMs (Large Language Models), die sowohl Sprache als auch Text verarbeiten können. Die Einrichtung multimodaler Modelle erfolgt im Abschnitt „Anbieter konfigurieren“."
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T584860404"] = "Mit Unterstützung von Modellen für Transkriptionen kann MindWork AI Studio menschliche Sprache in Text umwandeln. Das ist zum Beispiel hilfreich, wenn Sie Texte diktieren möchten. Sie können aus speziellen Modellen für Transkriptionen wählen, jedoch nicht aus multimodalen LLMs (Large Language Models), die sowohl Sprache als auch Text verarbeiten können. Die Einrichtung multimodaler Modelle erfolgt im Abschnitt „Anbieter für LLM konfigurieren“."
 
 -- Provider
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T900237532"] = "Anbieter"

@@ -2382,12 +2388,33 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VISION::T428040679"] = "Erstellung von In
 -- Useful assistants
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VISION::T586430036"] = "Nützliche Assistenten"
 
+-- Failed to create the transcription provider.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T1689988905"] = "Der Anbieter für die Transkription konnte nicht erstellt werden."
+
 -- Stop recording and start transcription
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T224155287"] = "Aufnahme beenden und Transkription starten"
 
 -- Start recording your voice for a transcription
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T2372624045"] = "Beginnen Sie mit der Aufnahme Ihrer Stimme für eine Transkription"
 
+-- Transcription in progress...
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T2851219233"] = "Transkription läuft …"
+
+-- The configured transcription provider was not found.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T331613105"] = "Der konfigurierte Anbieter für die Transkription wurde nicht gefunden."
+
+-- The configured transcription provider does not meet the minimum confidence level.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T3834149033"] = "Der konfigurierte Anbieter für die Transkription erfüllt nicht das erforderliche Mindestmaß an Vertrauenswürdigkeit."
+
+-- An error occurred during transcription.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T588743762"] = "Während der Transkription ist ein Fehler aufgetreten."
+
+-- No transcription provider is configured.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T663630295"] = "Es ist kein Anbieter für die Transkription konfiguriert."
+
+-- The transcription result is empty.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T974954792"] = "Das Ergebnis der Transkription ist leer."
+
 -- Are you sure you want to delete the chat '{0}' in the workspace '{1}'?
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::WORKSPACES::T1016188706"] = "Möchten Sie den Chat „{0}“ im Arbeitsbereich „{1}“ wirklich löschen?"
 
@@ -2130,6 +2130,9 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T32678
 -- Actions
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T3865031940"] = "Actions"
 
+-- This embedding provider is managed by your organization.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T4062656589"] = "This embedding provider is managed by your organization."
+
 -- No embeddings configured yet.
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELEMBEDDINGS::T4068015588"] = "No embeddings configured yet."
 

@@ -2289,15 +2292,18 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T40
 -- Configured Transcription Providers
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T4210863523"] = "Configured Transcription Providers"
 
+-- With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the 'Configure providers' section.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T584860404"] = "With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the 'Configure LLM providers' section."
+
+-- This transcription provider is managed by your organization.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T756131076"] = "This transcription provider is managed by your organization."
+
 -- Open Dashboard
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T78223861"] = "Open Dashboard"
 
 -- Are you sure you want to delete the transcription provider '{0}'?
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T789660305"] = "Are you sure you want to delete the transcription provider '{0}'?"
 
--- With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the \"Configure providers\" section.
-UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T799338148"] = "With the support of transcription models, MindWork AI Studio can convert human speech into text. This is useful, for example, when you need to dictate text. You can choose from dedicated transcription models, but not multimodal LLMs (large language models) that can handle both speech and text. The configuration of multimodal models is done in the \\\"Configure providers\\\" section."
-
 -- Provider
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::SETTINGS::SETTINGSPANELTRANSCRIPTION::T900237532"] = "Provider"
 

@@ -2382,12 +2388,33 @@ UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VISION::T428040679"] = "Content creation"
 -- Useful assistants
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VISION::T586430036"] = "Useful assistants"
 
+-- Failed to create the transcription provider.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T1689988905"] = "Failed to create the transcription provider."
+
 -- Stop recording and start transcription
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T224155287"] = "Stop recording and start transcription"
 
 -- Start recording your voice for a transcription
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T2372624045"] = "Start recording your voice for a transcription"
 
+-- Transcription in progress...
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T2851219233"] = "Transcription in progress..."
+
+-- The configured transcription provider was not found.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T331613105"] = "The configured transcription provider was not found."
+
+-- The configured transcription provider does not meet the minimum confidence level.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T3834149033"] = "The configured transcription provider does not meet the minimum confidence level."
+
+-- An error occurred during transcription.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T588743762"] = "An error occurred during transcription."
+
+-- No transcription provider is configured.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T663630295"] = "No transcription provider is configured."
+
+-- The transcription result is empty.
+UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::VOICERECORDER::T974954792"] = "The transcription result is empty."
+
 -- Are you sure you want to delete the chat '{0}' in the workspace '{1}'?
 UI_TEXT_CONTENT["AISTUDIO::COMPONENTS::WORKSPACES::T1016188706"] = "Are you sure you want to delete the chat '{0}' in the workspace '{1}'?"
 
@@ -25,7 +25,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -80,6 +80,12 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
         yield break;
     }
 #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        return Task.FromResult(string.Empty);
+    }
+
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
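AlibabaCloud does not support the new transcription call yet, so its override simply returns an empty string. The excerpt does not show how a provider with real speech-to-text support fills in TranscribeAudioAsync; as a rough illustration only, an OpenAI-compatible backend typically receives the audio as multipart form data on its /v1/audio/transcriptions endpoint. The helper below is an assumption, not code from this commit: endpoint, field names, and response shape follow the public OpenAI audio API, and the class and record names are placeholders.

// Illustrative sketch for an OpenAI-compatible transcription call (not part of the commit):
using System.Net.Http.Headers;
using System.Net.Http.Json;

public static class TranscriptionClientSketch
{
    private sealed record TranscriptionResponse(string Text);

    public static async Task<string> TranscribeAsync(HttpClient client, string apiKey, string modelId, string audioFilePath, CancellationToken token = default)
    {
        // client.BaseAddress is assumed to point at the provider's API root.
        using var request = new HttpRequestMessage(HttpMethod.Post, "v1/audio/transcriptions");
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", apiKey);

        // Upload the recorded file plus the model id as multipart form data:
        using var form = new MultipartFormDataContent();
        await using var audio = File.OpenRead(audioFilePath);
        form.Add(new StreamContent(audio), "file", Path.GetFileName(audioFilePath));
        form.Add(new StringContent(modelId), "model");
        request.Content = form;

        using var response = await client.SendAsync(request, token);
        response.EnsureSuccessStatusCode();

        // The endpoint answers with JSON of the form { "text": "..." }.
        var result = await response.Content.ReadFromJsonAsync<TranscriptionResponse>(cancellationToken: token);
        return result?.Text ?? string.Empty;
    }
}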
@ -111,7 +117,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
|
|||||||
new Model("qwen2.5-vl-3b-instruct", "Qwen2.5-VL 3b"),
|
new Model("qwen2.5-vl-3b-instruct", "Qwen2.5-VL 3b"),
|
||||||
};
|
};
|
||||||
|
|
||||||
return this.LoadModels(["q"],token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
|
return this.LoadModels(["q"], SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// <inheritdoc />
|
/// <inheritdoc />
|
||||||
@ -129,7 +135,7 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
|
|||||||
new Model("text-embedding-v3", "text-embedding-v3"),
|
new Model("text-embedding-v3", "text-embedding-v3"),
|
||||||
};
|
};
|
||||||
|
|
||||||
return this.LoadModels(["text-embedding-"], token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
|
return this.LoadModels(["text-embedding-"], SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
|
||||||
}
|
}
|
||||||
|
|
||||||
#region Overrides of BaseProvider
|
#region Overrides of BaseProvider
|
||||||
@ -144,12 +150,12 @@ public sealed class ProviderAlibabaCloud() : BaseProvider(LLMProviders.ALIBABA_C
|
|||||||
|
|
||||||
#endregion
|
#endregion
|
||||||
|
|
||||||
private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
|
private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
|
||||||
{
|
{
|
||||||
var secretKey = apiKeyProvisional switch
|
var secretKey = apiKeyProvisional switch
|
||||||
{
|
{
|
||||||
not null => apiKeyProvisional,
|
not null => apiKeyProvisional,
|
||||||
_ => await RUST_SERVICE.GetAPIKey(this) switch
|
_ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
|
||||||
{
|
{
|
||||||
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
|
{ Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
|
||||||
_ => null,
|
_ => null,
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -107,6 +107,12 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        return Task.FromResult(string.Empty);
+    }
+
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
@@ -121,7 +127,7 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "
             new Model("claude-3-opus-latest", "Claude 3 Opus (Latest)"),
         };
 
-        return this.LoadModels(token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
+        return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional).ContinueWith(t => t.Result.Concat(additionalModels).OrderBy(x => x.Id).AsEnumerable(), token);
     }
 
     /// <inheritdoc />
@@ -144,12 +150,12 @@ public sealed class ProviderAnthropic() : BaseProvider(LLMProviders.ANTHROPIC, "
 
     #endregion
 
-    private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+    private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
    {
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -1,4 +1,5 @@
 using System.Net;
+using System.Net.Http.Headers;
 using System.Runtime.CompilerServices;
 using System.Text.Json;
 using System.Text.Json.Serialization;
@@ -6,10 +7,15 @@ using System.Text.Json.Serialization;
 using AIStudio.Chat;
 using AIStudio.Provider.Anthropic;
 using AIStudio.Provider.OpenAI;
+using AIStudio.Provider.SelfHosted;
 using AIStudio.Settings;
+using AIStudio.Tools.MIME;
 using AIStudio.Tools.PluginSystem;
+using AIStudio.Tools.Rust;
 using AIStudio.Tools.Services;
 
+using Host = AIStudio.Provider.SelfHosted.Host;
+
 namespace AIStudio.Provider;
 
 /// <summary>
@@ -89,6 +95,9 @@ public abstract class BaseProvider : IProvider, ISecretId
     /// <inheritdoc />
     public abstract IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, CancellationToken token = default);
 
+    /// <inheritdoc />
+    public abstract Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default);
+
     /// <inheritdoc />
     public abstract Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default);
 
@@ -536,6 +545,78 @@ public abstract class BaseProvider : IProvider, ISecretId
         streamReader.Dispose();
     }
 
+    protected async Task<string> PerformStandardTranscriptionRequest(RequestedSecret requestedSecret, Model transcriptionModel, string audioFilePath, Host host = Host.NONE, CancellationToken token = default)
+    {
+        try
+        {
+            using var form = new MultipartFormDataContent();
+            var mimeType = Builder.FromFilename(audioFilePath);
+
+            await using var fileStream = File.OpenRead(audioFilePath);
+            using var fileContent = new StreamContent(fileStream);
+            fileContent.Headers.ContentType = new MediaTypeHeaderValue(mimeType);
+
+            form.Add(fileContent, "file", Path.GetFileName(audioFilePath));
+            form.Add(new StringContent(transcriptionModel.Id), "model");
+
+            using var request = new HttpRequestMessage(HttpMethod.Post, host.TranscriptionURL());
+            request.Content = form;
+
+            // Handle the authorization header based on the provider:
+            switch (this.Provider)
+            {
+                case LLMProviders.SELF_HOSTED:
+                    if(requestedSecret.Success)
+                        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));
+
+                    break;
+
+                case LLMProviders.FIREWORKS:
+                    if(!requestedSecret.Success)
+                    {
+                        this.logger.LogError("No valid API key available for transcription request.");
+                        return string.Empty;
+                    }
+
+                    request.Headers.Add("Authorization", await requestedSecret.Secret.Decrypt(ENCRYPTION));
+                    break;
+
+                default:
+                    if(!requestedSecret.Success)
+                    {
+                        this.logger.LogError("No valid API key available for transcription request.");
+                        return string.Empty;
+                    }
+
+                    request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await requestedSecret.Secret.Decrypt(ENCRYPTION));
+                    break;
+            }
+
+            using var response = await this.httpClient.SendAsync(request, token);
+            var responseBody = response.Content.ReadAsStringAsync(token).Result;
+
+            if (!response.IsSuccessStatusCode)
+            {
+                this.logger.LogError("Transcription request failed with status code {ResponseStatusCode} and body: '{ResponseBody}'.", response.StatusCode, responseBody);
+                return string.Empty;
+            }
+
+            var transcriptionResponse = JsonSerializer.Deserialize<TranscriptionResponse>(responseBody, JSON_SERIALIZER_OPTIONS);
+            if(transcriptionResponse is null)
+            {
+                this.logger.LogError("Was not able to deserialize the transcription response.");
+                return string.Empty;
+            }
+
+            return transcriptionResponse.Text;
+        }
+        catch (Exception e)
+        {
+            this.logger.LogError("Failed to perform transcription request: '{Message}'.", e.Message);
+            return string.Empty;
+        }
+    }
+
     /// <summary>
     /// Parse and convert API parameters from a provided JSON string into a dictionary,
     /// optionally merging additional parameters and removing specific keys.
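For orientation: the pattern the concrete providers follow with this new helper is small — fetch the API key from the transcription secret store, then delegate to PerformStandardTranscriptionRequest, which builds the multipart request, sets the authorization header, and parses the response. A minimal sketch of such an override (this is a paraphrase of what the Fireworks, GWDG, Mistral, and OpenAI providers below do, not additional code in this commit):

    /// <inheritdoc />
    public override async Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
    {
        // Read the key from the transcription secret store; the helper adds the auth header itself.
        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER);
        return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, token: token);
    }

Providers that do not offer a transcription endpoint instead return Task.FromResult(string.Empty), as the stubs in the following diffs show.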
@@ -25,7 +25,7 @@ public sealed class ProviderDeepSeek() : BaseProvider(LLMProviders.DEEP_SEEK, "h
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -80,11 +80,17 @@ public sealed class ProviderDeepSeek() : BaseProvider(LLMProviders.DEEP_SEEK, "h
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        return Task.FromResult(string.Empty);
+    }
+
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        return this.LoadModels(token, apiKeyProvisional);
+        return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
     }
 
     /// <inheritdoc />
@@ -107,12 +113,12 @@ public sealed class ProviderDeepSeek() : BaseProvider(LLMProviders.DEEP_SEEK, "h
 
     #endregion
 
-    private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+    private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
     {
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -25,7 +25,7 @@ public class ProviderFireworks() : BaseProvider(LLMProviders.FIREWORKS, "https:/
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -81,6 +81,13 @@ public class ProviderFireworks() : BaseProvider(LLMProviders.FIREWORKS, "https:/
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override async Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER);
+        return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, token: token);
+    }
+
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
@@ -103,11 +110,12 @@ public class ProviderFireworks() : BaseProvider(LLMProviders.FIREWORKS, "https:/
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
+        // Source: https://docs.fireworks.ai/api-reference/audio-transcriptions#param-model
         return Task.FromResult<IEnumerable<Model>>(
             new List<Model>
             {
                 new("whisper-v3", "Whisper v3"),
-                new("whisper-v3-turbo", "Whisper v3 Turbo"),
+                // new("whisper-v3-turbo", "Whisper v3 Turbo"), // does not work
             });
     }
 
@@ -25,7 +25,7 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -80,11 +80,18 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override async Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER);
+        return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, token: token);
+    }
+
     /// <inheritdoc />
     public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var models = await this.LoadModels(token, apiKeyProvisional);
+        var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
         return models.Where(model => !model.Id.StartsWith("e5-mistral-7b-instruct", StringComparison.InvariantCultureIgnoreCase));
     }
 
@@ -97,7 +104,7 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch
     /// <inheritdoc />
     public override async Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var models = await this.LoadModels(token, apiKeyProvisional);
+        var models = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
         return models.Where(model => model.Id.StartsWith("e5-", StringComparison.InvariantCultureIgnoreCase));
     }
 
@@ -114,12 +121,12 @@ public sealed class ProviderGWDG() : BaseProvider(LLMProviders.GWDG, "https://ch
 
     #endregion
 
-    private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+    private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
     {
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -25,7 +25,7 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -82,10 +82,16 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override Task<string> TranscribeAudioAsync(Provider.Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        return Task.FromResult(string.Empty);
+    }
+
     /// <inheritdoc />
     public override async Task<IEnumerable<Provider.Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var modelResponse = await this.LoadModels(token, apiKeyProvisional);
+        var modelResponse = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
         if(modelResponse == default)
             return [];
 
@@ -102,7 +108,7 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
 
     public override async Task<IEnumerable<Provider.Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var modelResponse = await this.LoadModels(token, apiKeyProvisional);
+        var modelResponse = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
         if(modelResponse == default)
             return [];
 
@@ -120,12 +126,12 @@ public class ProviderGoogle() : BaseProvider(LLMProviders.GOOGLE, "https://gener
 
     #endregion
 
-    private async Task<ModelsResponse> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+    private async Task<ModelsResponse> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
     {
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -25,7 +25,7 @@ public class ProviderGroq() : BaseProvider(LLMProviders.GROQ, "https://api.groq.
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -81,17 +81,23 @@ public class ProviderGroq() : BaseProvider(LLMProviders.GROQ, "https://api.groq.
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        return Task.FromResult(string.Empty);
+    }
+
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        return this.LoadModels(token, apiKeyProvisional);
+        return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
     }
 
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        return Task.FromResult<IEnumerable<Model>>(Array.Empty<Model>());
+        return Task.FromResult<IEnumerable<Model>>([]);
     }
 
     /// <inheritdoc />
@@ -108,12 +114,12 @@ public class ProviderGroq() : BaseProvider(LLMProviders.GROQ, "https://api.groq.
 
     #endregion
 
-    private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+    private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
     {
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -25,7 +25,7 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -80,11 +80,17 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        return Task.FromResult(string.Empty);
+    }
+
     /// <inheritdoc />
     public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var models = await this.LoadModels(token, apiKeyProvisional);
+        var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
         return models.Where(model => !model.Id.StartsWith("text-", StringComparison.InvariantCultureIgnoreCase) &&
                                      !model.Id.StartsWith("alias-embedding", StringComparison.InvariantCultureIgnoreCase));
     }
@@ -98,7 +104,7 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
     /// <inheritdoc />
     public override async Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var models = await this.LoadModels(token, apiKeyProvisional);
+        var models = await this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, token, apiKeyProvisional);
         return models.Where(model =>
             model.Id.StartsWith("alias-embedding", StringComparison.InvariantCultureIgnoreCase) ||
             model.Id.StartsWith("text-", StringComparison.InvariantCultureIgnoreCase) ||
@@ -113,12 +119,12 @@ public sealed class ProviderHelmholtz() : BaseProvider(LLMProviders.HELMHOLTZ, "
 
     #endregion
 
-    private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+    private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
     {
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -30,7 +30,7 @@ public sealed class ProviderHuggingFace : BaseProvider
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -85,6 +85,12 @@ public sealed class ProviderHuggingFace : BaseProvider
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        return Task.FromResult(string.Empty);
+    }
+
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
@@ -50,6 +50,16 @@ public interface IProvider
     /// <returns>The image completion stream.</returns>
     public IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, CancellationToken token = default);
 
+    /// <summary>
+    /// Transcribe an audio file.
+    /// </summary>
+    /// <param name="transcriptionModel">The model to use for transcription.</param>
+    /// <param name="audioFilePath">The audio file path.</param>
+    /// <param name="settingsManager">The settings manager instance to use.</param>
+    /// <param name="token">The cancellation token.</param>
+    /// <returns>The transcription result.</returns>
+    public Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default);
+
     /// <summary>
     /// Load all possible text models that can be used with this provider.
     /// </summary>
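With this member on the interface, calling code can request a transcript without knowing which concrete provider is configured; every provider error path yields an empty string rather than throwing. A rough caller-side sketch under that assumption (this helper is hypothetical and not part of this commit):

    // Hypothetical convenience wrapper around the new interface member. It relies only on
    // the contract visible in this diff: failures surface as string.Empty, already logged.
    private static async Task<string> TranscribeOrEmptyAsync(IProvider provider, Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token)
    {
        var text = await provider.TranscribeAudioAsync(transcriptionModel, audioFilePath, settingsManager, token);
        return string.IsNullOrWhiteSpace(text)
            ? string.Empty   // no transcript available; the provider already logged the reason
            : text.Trim();
    }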
@@ -23,7 +23,7 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -81,11 +81,18 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override async Task<string> TranscribeAudioAsync(Provider.Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER);
+        return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, token: token);
+    }
+
     /// <inheritdoc />
     public override async Task<IEnumerable<Provider.Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var modelResponse = await this.LoadModelList(apiKeyProvisional, token);
+        var modelResponse = await this.LoadModelList(SecretStoreType.LLM_PROVIDER, apiKeyProvisional, token);
         if(modelResponse == default)
             return [];
 
@@ -99,7 +106,7 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http
     /// <inheritdoc />
     public override async Task<IEnumerable<Provider.Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var modelResponse = await this.LoadModelList(apiKeyProvisional, token);
+        var modelResponse = await this.LoadModelList(SecretStoreType.EMBEDDING_PROVIDER, apiKeyProvisional, token);
         if(modelResponse == default)
             return [];
 
@@ -126,12 +133,12 @@ public sealed class ProviderMistral() : BaseProvider(LLMProviders.MISTRAL, "http
 
     #endregion
 
-    private async Task<ModelsResponse> LoadModelList(string? apiKeyProvisional, CancellationToken token)
+    private async Task<ModelsResponse> LoadModelList(SecretStoreType storeType, string? apiKeyProvisional, CancellationToken token)
     {
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -38,6 +38,8 @@ public class NoProvider : IProvider
         yield break;
     }
 
+    public Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default) => Task.FromResult(string.Empty);
+
     public IReadOnlyCollection<Capability> GetModelCapabilities(Model model) => [ Capability.NONE ];
 
     #endregion
@@ -27,7 +27,7 @@ public sealed class ProviderOpenAI() : BaseProvider(LLMProviders.OPEN_AI, "https
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -217,11 +217,18 @@ public sealed class ProviderOpenAI() : BaseProvider(LLMProviders.OPEN_AI, "https
     }
 
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override async Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER);
+        return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, token: token);
+    }
+
     /// <inheritdoc />
     public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var models = await this.LoadModels(["chatgpt-", "gpt-", "o1-", "o3-", "o4-"], token, apiKeyProvisional);
+        var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, ["chatgpt-", "gpt-", "o1-", "o3-", "o4-"], token, apiKeyProvisional);
         return models.Where(model => !model.Id.Contains("image", StringComparison.OrdinalIgnoreCase) &&
                                      !model.Id.Contains("realtime", StringComparison.OrdinalIgnoreCase) &&
                                      !model.Id.Contains("audio", StringComparison.OrdinalIgnoreCase) &&
@@ -232,31 +239,31 @@ public sealed class ProviderOpenAI() : BaseProvider(LLMProviders.OPEN_AI, "https
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        return this.LoadModels(["dall-e-", "gpt-image"], token, apiKeyProvisional);
+        return this.LoadModels(SecretStoreType.IMAGE_PROVIDER, ["dall-e-", "gpt-image"], token, apiKeyProvisional);
     }
 
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        return this.LoadModels(["text-embedding-"], token, apiKeyProvisional);
+        return this.LoadModels(SecretStoreType.EMBEDDING_PROVIDER, ["text-embedding-"], token, apiKeyProvisional);
    }
 
     /// <inheritdoc />
     public override async Task<IEnumerable<Model>> GetTranscriptionModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        var models = await this.LoadModels(["whisper-", "gpt-"], token, apiKeyProvisional);
+        var models = await this.LoadModels(SecretStoreType.TRANSCRIPTION_PROVIDER, ["whisper-", "gpt-"], token, apiKeyProvisional);
         return models.Where(model => model.Id.StartsWith("whisper-", StringComparison.InvariantCultureIgnoreCase) ||
                                      model.Id.Contains("-transcribe", StringComparison.InvariantCultureIgnoreCase));
     }
 
     #endregion
 
-    private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
+    private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
     {
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -28,7 +28,7 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -88,11 +88,17 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        return Task.FromResult(string.Empty);
+    }
+
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
     {
-        return this.LoadModels(token, apiKeyProvisional);
+        return this.LoadModels(SecretStoreType.LLM_PROVIDER, token, apiKeyProvisional);
     }
 
     /// <inheritdoc />
@@ -115,12 +121,12 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER
 
     #endregion
 
-    private async Task<IEnumerable<Model>> LoadModels(CancellationToken token, string? apiKeyProvisional = null)
+    private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, CancellationToken token, string? apiKeyProvisional = null)
     {
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -162,7 +168,7 @@ public sealed class ProviderOpenRouter() : BaseProvider(LLMProviders.OPEN_ROUTER
         var secretKey = apiKeyProvisional switch
         {
             not null => apiKeyProvisional,
-            _ => await RUST_SERVICE.GetAPIKey(this) switch
+            _ => await RUST_SERVICE.GetAPIKey(this, SecretStoreType.EMBEDDING_PROVIDER) switch
             {
                 { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                 _ => null,
@@ -34,7 +34,7 @@ public sealed class ProviderPerplexity() : BaseProvider(LLMProviders.PERPLEXITY,
     public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
-        var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+        var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
         if(!requestedSecret.Success)
             yield break;
 
@@ -88,6 +88,12 @@ public sealed class ProviderPerplexity() : BaseProvider(LLMProviders.PERPLEXITY,
         yield break;
     }
     #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
 
+    /// <inheritdoc />
+    public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+    {
+        return Task.FromResult(string.Empty);
+    }
+
     /// <inheritdoc />
     public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
@ -26,7 +26,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
|
|||||||
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
|
public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
|
||||||
{
|
{
|
||||||
// Get the API key:
|
// Get the API key:
|
||||||
var requestedSecret = await RUST_SERVICE.GetAPIKey(this, isTrying: true);
|
var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER, isTrying: true);
|
||||||
|
|
||||||
// Prepare the system prompt:
|
// Prepare the system prompt:
|
||||||
var systemPrompt = new TextMessage
|
var systemPrompt = new TextMessage
|
||||||
@ -88,6 +88,13 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
|
|||||||
}
|
}
|
||||||
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
|
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
|
||||||
|
|
||||||
|
/// <inheritdoc />
|
||||||
|
public override async Task<string> TranscribeAudioAsync(Provider.Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
|
||||||
|
{
|
||||||
|
var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.TRANSCRIPTION_PROVIDER, isTrying: true);
|
||||||
|
return await this.PerformStandardTranscriptionRequest(requestedSecret, transcriptionModel, audioFilePath, host, token);
|
||||||
|
}
|
||||||
|
|
||||||
public override async Task<IEnumerable<Provider.Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
|
public override async Task<IEnumerable<Provider.Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
|
||||||
{
|
{
|
||||||
try
|
try
|
||||||
@ -102,7 +109,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
|
|||||||
case Host.LM_STUDIO:
|
case Host.LM_STUDIO:
|
||||||
case Host.OLLAMA:
|
case Host.OLLAMA:
|
||||||
case Host.VLLM:
|
case Host.VLLM:
|
||||||
return await this.LoadModels(["embed"], [], token, apiKeyProvisional);
|
return await this.LoadModels( SecretStoreType.LLM_PROVIDER, ["embed"], [], token, apiKeyProvisional);
|
||||||
}
|
}
|
||||||
|
|
||||||
return [];
|
return [];
|
||||||
@ -129,7 +136,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide
|
|||||||
case Host.LM_STUDIO:
|
case Host.LM_STUDIO:
|
||||||
case Host.OLLAMA:
|
case Host.OLLAMA:
|
||||||
case Host.VLLM:
|
case Host.VLLM:
|
||||||
return await this.LoadModels([], ["embed"], token, apiKeyProvisional);
|
return await this.LoadModels( SecretStoreType.EMBEDDING_PROVIDER, [], ["embed"], token, apiKeyProvisional);
|
||||||
}
|
}
|
||||||
|
|
||||||
return [];
|
return [];
|
||||||
@ -157,7 +164,7 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide

            case Host.OLLAMA:
            case Host.VLLM:
-               return this.LoadModels([], [], token, apiKeyProvisional);
+               return this.LoadModels(SecretStoreType.TRANSCRIPTION_PROVIDER, [], [], token, apiKeyProvisional);

            default:
                return Task.FromResult(Enumerable.Empty<Provider.Model>());
@ -172,12 +179,12 @@ public sealed class ProviderSelfHosted(Host host, string hostname) : BaseProvide

    #endregion

-   private async Task<IEnumerable<Provider.Model>> LoadModels(string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null)
+   private async Task<IEnumerable<Provider.Model>> LoadModels(SecretStoreType storeType, string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null)
    {
        var secretKey = apiKeyProvisional switch
        {
            not null => apiKeyProvisional,
-           _ => await RUST_SERVICE.GetAPIKey(this, isTrying: true) switch
+           _ => await RUST_SERVICE.GetAPIKey(this, storeType, isTrying: true) switch
            {
                { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                _ => null,
app/MindWork AI Studio/Provider/TranscriptionResponse.cs (new file, 3 lines)
@ -0,0 +1,3 @@
namespace AIStudio.Provider;

public sealed record TranscriptionResponse(string Text);
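The new record is the payload shape the transcription endpoint is expected to return, presumably an OpenAI-compatible `{"text": "..."}` body. A minimal deserialization sketch; the case-insensitive JSON options here are an assumption, not taken from this commit:

```csharp
using System.Text.Json;

// Sketch: map an OpenAI-style transcription reply onto the new record.
var json = """{"text": "Hello from the microphone."}""";
var options = new JsonSerializerOptions { PropertyNameCaseInsensitive = true };
var response = JsonSerializer.Deserialize<TranscriptionResponse>(json, options);
Console.WriteLine(response?.Text); // Hello from the microphone.
```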
@ -25,7 +25,7 @@ public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai
    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
    {
        // Get the API key:
-       var requestedSecret = await RUST_SERVICE.GetAPIKey(this);
+       var requestedSecret = await RUST_SERVICE.GetAPIKey(this, SecretStoreType.LLM_PROVIDER);
        if(!requestedSecret.Success)
            yield break;

@ -81,11 +81,17 @@ public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai
            yield break;
        }
    #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously

+   /// <inheritdoc />
+   public override Task<string> TranscribeAudioAsync(Model transcriptionModel, string audioFilePath, SettingsManager settingsManager, CancellationToken token = default)
+   {
+       return Task.FromResult(string.Empty);
+   }
+
    /// <inheritdoc />
    public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
-       var models = await this.LoadModels(["grok-"], token, apiKeyProvisional);
+       var models = await this.LoadModels(SecretStoreType.LLM_PROVIDER, ["grok-"], token, apiKeyProvisional);
        return models.Where(n => !n.Id.Contains("-image", StringComparison.OrdinalIgnoreCase));
    }

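Since the xAI implementation is a stub that always returns an empty string, callers cannot assume every provider produces a transcript. A defensive pattern might look like this; it is a sketch only, and `provider`, `model`, `audioFilePath`, `settings`, `token`, and `logger` are placeholders:

```csharp
// Sketch: treat an empty result as "transcription not supported by this provider".
var transcript = await provider.TranscribeAudioAsync(model, audioFilePath, settings, token);
if (string.IsNullOrWhiteSpace(transcript))
{
    logger.LogWarning("The selected provider returned no transcript; it may not support transcription.");
}
else
{
    // ... hand the transcript on to the chat thread, assistant, etc. ...
}
```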
@ -109,12 +115,12 @@ public sealed class ProviderX() : BaseProvider(LLMProviders.X, "https://api.x.ai

    #endregion

-   private async Task<IEnumerable<Model>> LoadModels(string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
+   private async Task<IEnumerable<Model>> LoadModels(SecretStoreType storeType, string[] prefixes, CancellationToken token, string? apiKeyProvisional = null)
    {
        var secretKey = apiKeyProvisional switch
        {
            not null => apiKeyProvisional,
-           _ => await RUST_SERVICE.GetAPIKey(this) switch
+           _ => await RUST_SERVICE.GetAPIKey(this, storeType) switch
            {
                { Success: true } result => await result.Secret.Decrypt(ENCRYPTION),
                _ => null,
@ -8,6 +8,68 @@ public class Builder

    public static Builder Create() => new();

+   public static MIMEType FromFilename(string filenameOrPath)
+   {
+       var extension = Path.GetExtension(filenameOrPath);
+       if (string.IsNullOrEmpty(extension))
+           throw new ArgumentException("Filename or path does not have a valid extension.", nameof(filenameOrPath));
+
+       extension = extension.TrimStart('.').ToLowerInvariant();
+
+       var builder = Create();
+       return extension switch
+       {
+           // Application types
+           "pdf" => builder.UseApplication().UseSubtype(ApplicationSubtype.PDF).Build(),
+           "zip" => builder.UseApplication().UseSubtype(ApplicationSubtype.ZIP).Build(),
+           "doc" => builder.UseApplication().UseSubtype(ApplicationSubtype.WORD_OLD).Build(),
+           "docx" => builder.UseApplication().UseSubtype(ApplicationSubtype.WORD).Build(),
+           "xls" => builder.UseApplication().UseSubtype(ApplicationSubtype.EXCEL_OLD).Build(),
+           "xlsx" => builder.UseApplication().UseSubtype(ApplicationSubtype.EXCEL).Build(),
+           "ppt" => builder.UseApplication().UseSubtype(ApplicationSubtype.POWERPOINT_OLD).Build(),
+           "pptx" => builder.UseApplication().UseSubtype(ApplicationSubtype.POWERPOINT).Build(),
+           "json" => builder.UseApplication().UseSubtype(ApplicationSubtype.JSON).Build(),
+           "xml" => builder.UseApplication().UseSubtype(ApplicationSubtype.XML).Build(),
+
+           // Text types
+           "txt" => builder.UseText().UseSubtype(TextSubtype.PLAIN).Build(),
+           "html" or "htm" => builder.UseText().UseSubtype(TextSubtype.HTML).Build(),
+           "css" => builder.UseText().UseSubtype(TextSubtype.CSS).Build(),
+           "csv" => builder.UseText().UseSubtype(TextSubtype.CSV).Build(),
+           "js" => builder.UseText().UseSubtype(TextSubtype.JAVASCRIPT).Build(),
+           "md" or "markdown" => builder.UseText().UseSubtype(TextSubtype.MARKDOWN).Build(),
+
+           // Audio types
+           "wav" => builder.UseAudio().UseSubtype(AudioSubtype.WAV).Build(),
+           "mp3" => builder.UseAudio().UseSubtype(AudioSubtype.MP3).Build(),
+           "ogg" => builder.UseAudio().UseSubtype(AudioSubtype.OGG).Build(),
+           "aac" => builder.UseAudio().UseSubtype(AudioSubtype.AAC).Build(),
+           "flac" => builder.UseAudio().UseSubtype(AudioSubtype.FLAC).Build(),
+           "m4a" => builder.UseAudio().UseSubtype(AudioSubtype.M4A).Build(),
+           "aiff" or "aif" => builder.UseAudio().UseSubtype(AudioSubtype.AIFF).Build(),
+           "mpga" => builder.UseAudio().UseSubtype(AudioSubtype.MPEG).Build(),
+           "webm" => builder.UseAudio().UseSubtype(AudioSubtype.WEBM).Build(),
+
+           // Image types
+           "jpg" or "jpeg" => builder.UseImage().UseSubtype(ImageSubtype.JPEG).Build(),
+           "png" => builder.UseImage().UseSubtype(ImageSubtype.PNG).Build(),
+           "gif" => builder.UseImage().UseSubtype(ImageSubtype.GIF).Build(),
+           "tiff" or "tif" => builder.UseImage().UseSubtype(ImageSubtype.TIFF).Build(),
+           "webp" => builder.UseImage().UseSubtype(ImageSubtype.WEBP).Build(),
+           "svg" => builder.UseImage().UseSubtype(ImageSubtype.SVG).Build(),
+           "heic" => builder.UseImage().UseSubtype(ImageSubtype.HEIC).Build(),
+
+           // Video types
+           "mp4" => builder.UseVideo().UseSubtype(VideoSubtype.MP4).Build(),
+           "avi" => builder.UseVideo().UseSubtype(VideoSubtype.AVI).Build(),
+           "mov" => builder.UseVideo().UseSubtype(VideoSubtype.MOV).Build(),
+           "mkv" => builder.UseVideo().UseSubtype(VideoSubtype.MKV).Build(),
+           "mpeg" or "mpg" => builder.UseVideo().UseSubtype(VideoSubtype.MPEG).Build(),
+
+           _ => throw new ArgumentException($"Unsupported file extension: '.{extension}'.", nameof(filenameOrPath))
+       };
+   }
+
    public static MIMEType FromTextRepresentation(string textRepresentation)
    {
        var parts = textRepresentation.Split('/');
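The new `FromFilename` helper resolves a MIME type purely from the file extension, lower-casing it first and throwing for anything it does not know. A short usage sketch; the textual MIME values in the comments are the customary names for these types, not output captured from this code:

```csharp
// Known extensions map onto the builder chain, case-insensitively:
var audio = MIMEType.FromFilename("recording.WEBM");     // audio/webm
var image = MIMEType.FromFilename(@"C:\scans\page.tif"); // image/tiff

// Unknown or missing extensions raise an ArgumentException:
try
{
    MIMEType.FromFilename("notes.xyz");
}
catch (ArgumentException ex)
{
    Console.WriteLine(ex.Message); // Unsupported file extension: '.xyz'.
}
```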
app/MindWork AI Studio/Tools/SecretStoreType.cs (new file, 32 lines)
@ -0,0 +1,32 @@
namespace AIStudio.Tools;

/// <summary>
/// Represents the type of secret store used for API keys.
/// </summary>
/// <remarks>
/// Different provider types use different prefixes for storing API keys.
/// This prevents collisions when the same instance name is used across
/// different provider types (e.g., LLM, Embedding, Transcription).
/// </remarks>
public enum SecretStoreType
{
    /// <summary>
    /// LLM provider secrets. Uses the legacy "provider::" prefix for backward compatibility.
    /// </summary>
    LLM_PROVIDER = 0,

    /// <summary>
    /// Embedding provider secrets. Uses the "embedding::" prefix.
    /// </summary>
    EMBEDDING_PROVIDER,

    /// <summary>
    /// Transcription provider secrets. Uses the "transcription::" prefix.
    /// </summary>
    TRANSCRIPTION_PROVIDER,

    /// <summary>
    /// Image provider secrets. Uses the "image::" prefix.
    /// </summary>
    IMAGE_PROVIDER,
}
app/MindWork AI Studio/Tools/SecretStoreTypeExtensions.cs (new file, 21 lines)
@ -0,0 +1,21 @@
namespace AIStudio.Tools;

public static class SecretStoreTypeExtensions
{
    /// <summary>
    /// Gets the prefix string associated with the SecretStoreType.
    /// </summary>
    /// <remarks>
    /// LLM_PROVIDER uses the legacy "provider" prefix for backward compatibility.
    /// </remarks>
    /// <param name="type">The SecretStoreType enum value.</param>
    /// <returns>The corresponding prefix string.</returns>
    public static string Prefix(this SecretStoreType type) => type switch
    {
        SecretStoreType.LLM_PROVIDER => "provider",
        SecretStoreType.EMBEDDING_PROVIDER => "embedding",
        SecretStoreType.TRANSCRIPTION_PROVIDER => "transcription",

        _ => "provider",
    };
}
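Taken together with the RustService changes below, the prefix is what keeps secrets for different provider kinds apart even when the instance name is identical. A small sketch of the resulting key names; the id and name values are placeholders, and the `<prefix>::<id>::<name>::api_key` shape comes from the requests below:

```csharp
// Placeholders standing in for ISecretId.SecretId and ISecretId.SecretName:
var id = "42";
var name = "My Server";

var llmKey = $"{SecretStoreType.LLM_PROVIDER.Prefix()}::{id}::{name}::api_key";             // provider::42::My Server::api_key
var embeddingKey = $"{SecretStoreType.EMBEDDING_PROVIDER.Prefix()}::{id}::{name}::api_key"; // embedding::42::My Server::api_key
var sttKey = $"{SecretStoreType.TRANSCRIPTION_PROVIDER.Prefix()}::{id}::{name}::api_key";   // transcription::42::My Server::api_key
```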
@ -9,68 +9,76 @@ public sealed partial class RustService
    /// </summary>
    /// <param name="secretId">The secret ID to get the API key for.</param>
    /// <param name="isTrying">Indicates if we are trying to get the API key. In that case, we don't log errors.</param>
+   /// <param name="storeType">The secret store type. Defaults to LLM_PROVIDER for backward compatibility.</param>
    /// <returns>The requested secret.</returns>
-   public async Task<RequestedSecret> GetAPIKey(ISecretId secretId, bool isTrying = false)
+   public async Task<RequestedSecret> GetAPIKey(ISecretId secretId, SecretStoreType storeType, bool isTrying = false)
    {
-       var secretRequest = new SelectSecretRequest($"provider::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, isTrying);
+       var prefix = storeType.Prefix();
+       var secretRequest = new SelectSecretRequest($"{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, isTrying);
        var result = await this.http.PostAsJsonAsync("/secrets/get", secretRequest, this.jsonRustSerializerOptions);
        if (!result.IsSuccessStatusCode)
        {
            if(!isTrying)
-               this.logger!.LogError($"Failed to get the API key for secret ID '{secretId.SecretId}' due to an API issue: '{result.StatusCode}'");
+               this.logger!.LogError($"Failed to get the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key' due to an API issue: '{result.StatusCode}'");
            return new RequestedSecret(false, new EncryptedText(string.Empty), TB("Failed to get the API key due to an API issue."));
        }

        var secret = await result.Content.ReadFromJsonAsync<RequestedSecret>(this.jsonRustSerializerOptions);
        if (!secret.Success && !isTrying)
-           this.logger!.LogError($"Failed to get the API key for secret ID '{secretId.SecretId}': '{secret.Issue}'");
+           this.logger!.LogError($"Failed to get the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key': '{secret.Issue}'");

+       this.logger!.LogDebug($"Successfully retrieved the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key'.");
        return secret;
    }

    /// <summary>
    /// Try to store the API key for the given secret ID.
    /// </summary>
    /// <param name="secretId">The secret ID to store the API key for.</param>
    /// <param name="key">The API key to store.</param>
+   /// <param name="storeType">The secret store type. Defaults to LLM_PROVIDER for backward compatibility.</param>
    /// <returns>The store secret response.</returns>
-   public async Task<StoreSecretResponse> SetAPIKey(ISecretId secretId, string key)
+   public async Task<StoreSecretResponse> SetAPIKey(ISecretId secretId, string key, SecretStoreType storeType)
    {
+       var prefix = storeType.Prefix();
        var encryptedKey = await this.encryptor!.Encrypt(key);
-       var request = new StoreSecretRequest($"provider::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, encryptedKey);
+       var request = new StoreSecretRequest($"{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, encryptedKey);
        var result = await this.http.PostAsJsonAsync("/secrets/store", request, this.jsonRustSerializerOptions);
        if (!result.IsSuccessStatusCode)
        {
-           this.logger!.LogError($"Failed to store the API key for secret ID '{secretId.SecretId}' due to an API issue: '{result.StatusCode}'");
+           this.logger!.LogError($"Failed to store the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key' due to an API issue: '{result.StatusCode}'");
            return new StoreSecretResponse(false, TB("Failed to get the API key due to an API issue."));
        }

        var state = await result.Content.ReadFromJsonAsync<StoreSecretResponse>(this.jsonRustSerializerOptions);
        if (!state.Success)
-           this.logger!.LogError($"Failed to store the API key for secret ID '{secretId.SecretId}': '{state.Issue}'");
+           this.logger!.LogError($"Failed to store the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key': '{state.Issue}'");

+       this.logger!.LogDebug($"Successfully stored the API key for '{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key'.");
        return state;
    }

    /// <summary>
    /// Tries to delete the API key for the given secret ID.
    /// </summary>
    /// <param name="secretId">The secret ID to delete the API key for.</param>
+   /// <param name="storeType">The secret store type. Defaults to LLM_PROVIDER for backward compatibility.</param>
    /// <returns>The delete secret response.</returns>
-   public async Task<DeleteSecretResponse> DeleteAPIKey(ISecretId secretId)
+   public async Task<DeleteSecretResponse> DeleteAPIKey(ISecretId secretId, SecretStoreType storeType)
    {
-       var request = new SelectSecretRequest($"provider::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, false);
+       var prefix = storeType.Prefix();
+       var request = new SelectSecretRequest($"{prefix}::{secretId.SecretId}::{secretId.SecretName}::api_key", Environment.UserName, false);
        var result = await this.http.PostAsJsonAsync("/secrets/delete", request, this.jsonRustSerializerOptions);
        if (!result.IsSuccessStatusCode)
        {
            this.logger!.LogError($"Failed to delete the API key for secret ID '{secretId.SecretId}' due to an API issue: '{result.StatusCode}'");
            return new DeleteSecretResponse{Success = false, WasEntryFound = false, Issue = TB("Failed to delete the API key due to an API issue.")};
        }

        var state = await result.Content.ReadFromJsonAsync<DeleteSecretResponse>(this.jsonRustSerializerOptions);
        if (!state.Success)
            this.logger!.LogError($"Failed to delete the API key for secret ID '{secretId.SecretId}': '{state.Issue}'");

        return state;
    }
}
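From a caller's point of view, the store type now travels through every secret operation. A rough usage sketch; `RUST_SERVICE`, `ENCRYPTION`, `provider`, and the key value are placeholders, and only the method signatures match the code above:

```csharp
// Store the key under the transcription namespace:
var stored = await RUST_SERVICE.SetAPIKey(provider, "the-api-key", SecretStoreType.TRANSCRIPTION_PROVIDER);
if (!stored.Success)
    return;

// Read it back through the same store type; an LLM provider with the same
// name no longer collides with this entry:
var secret = await RUST_SERVICE.GetAPIKey(provider, SecretStoreType.TRANSCRIPTION_PROVIDER, isTrying: true);
if (secret.Success)
{
    var apiKey = await secret.Secret.Decrypt(ENCRYPTION);
    // ... use apiKey for the transcription request ...
}

// Deleting removes exactly this namespaced entry:
await RUST_SERVICE.DeleteAPIKey(provider, SecretStoreType.TRANSCRIPTION_PROVIDER);
```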
@ -27,28 +27,28 @@ window.scrollToBottom = function(element) {
    element.scrollIntoView({ behavior: 'smooth', block: 'end', inline: 'nearest' });
}

+window.playSound = function(soundPath) {
+    try {
+        const audio = new Audio(soundPath);
+        audio.play().catch(error => {
+            console.warn('Failed to play sound effect:', error);
+        });
+    } catch (error) {
+        console.warn('Error creating audio element:', error);
+    }
+};
+
let mediaRecorder;
let actualRecordingMimeType;
let changedMimeType = false;
let pendingChunkUploads = 0;

window.audioRecorder = {
-   playSound: function(soundPath) {
-       try {
-           const audio = new Audio(soundPath);
-           audio.play().catch(error => {
-               console.warn('Failed to play sound effect:', error);
-           });
-       } catch (error) {
-           console.warn('Error creating audio element:', error);
-       }
-   },
-
    start: async function (dotnetRef, desiredMimeTypes = []) {
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });

        // Play start recording sound effect:
-       this.playSound('/sounds/start_recording.ogg');
+       window.playSound('/sounds/start_recording.ogg');

        // When only one mime type is provided as a string, convert it to an array:
        if (typeof desiredMimeTypes === 'string') {
@ -138,7 +138,7 @@ window.audioRecorder = {
        console.log('Audio recording - all chunks uploaded, finalizing.');

        // Play stop recording sound effect:
-       window.audioRecorder.playSound('/sounds/stop_recording.ogg');
+       window.playSound('/sounds/stop_recording.ogg');

        // Stop all tracks to release the microphone:
        mediaRecorder.stream.getTracks().forEach(track => track.stop());
@ -7,3 +7,5 @@
 - Added the option to configure embedding providers through a config plugin and distribute them within an organization.
 - Improved the app versioning. Starting in 2026, each version number includes the year, followed by the month. The last digit shows the release number for that month. For example, version `26.1.1` is the first release in January 2026.
 - Fixed a bug in the profile selection where the "Use no profile" entry could not be localized, causing English text to appear in languages such as German. This behavior has now been fixed.
+- Fixed a bug in the provider dialogs (LLMs, embeddings, and transcriptions) when editing a provider. In cases where an error had to be displayed, a non-localized message in English was used.
+- Fixed a very rare bug in the provider dialogs (LLMs, embeddings, and transcriptions) where a validation error appeared if the API key could not be read from the operating system, but the error did not clear after the user changed the API key.