Merge branch 'main' into extract-i18n

This commit is contained in:
Thorsten Sommer 2025-04-24 13:46:47 +02:00 committed by GitHub
commit 167eb134ce
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
41 changed files with 695 additions and 473 deletions

View File

@ -15,6 +15,7 @@ Things we are currently working on:
- [x] ~~App: Management of data sources (local & external data via [ERI](https://github.com/MindWorkAI/ERI)) (PR [#259](https://github.com/MindWorkAI/AI-Studio/pull/259), [#273](https://github.com/MindWorkAI/AI-Studio/pull/273))~~
- [x] ~~Runtime: Extract data from txt / md / pdf / docx / xlsx files (PR [#374](https://github.com/MindWorkAI/AI-Studio/pull/374))~~
- [ ] (*Optional*) Runtime: Implement internal embedding provider through [fastembed-rs](https://github.com/Anush008/fastembed-rs)
- [ ] App: Implement dialog for checking & handling [pandoc](https://pandoc.org/) installation ([PR #393](https://github.com/MindWorkAI/AI-Studio/pull/393))
- [ ] App: Implement external embedding providers
- [ ] App: Implement the process to vectorize one local file using embeddings
- [ ] Runtime: Integration of the vector database [LanceDB](https://github.com/lancedb/lancedb)
@ -24,26 +25,37 @@ Things we are currently working on:
- [x] ~~App: Integrate data sources in chats (PR [#282](https://github.com/MindWorkAI/AI-Studio/pull/282))~~
- Since September 2024: Experiments have been started on how we can work on long texts with AI Studio. Let's say you want to write a fantasy novel or create a complex project proposal and use LLM for support. The initial experiments were promising, but not yet satisfactory. We are testing further approaches until a satisfactory solution is found. The current state of our experiment is available as an experimental preview feature through your app configuration. Related PR: ~~[#167](https://github.com/MindWorkAI/AI-Studio/pull/167), [#226](https://github.com/MindWorkAI/AI-Studio/pull/226)~~.
- Since September 2024: Experiments have been started on how we can work on long texts with AI Studio. Let's say you want to write a fantasy novel or create a complex project proposal and use LLM for support. The initial experiments were promising, but not yet satisfactory. We are testing further approaches until a satisfactory solution is found. The current state of our experiment is available as an experimental preview feature through your app configuration. Related PR: ~~[PR #167](https://github.com/MindWorkAI/AI-Studio/pull/167), [PR #226](https://github.com/MindWorkAI/AI-Studio/pull/226)~~, [PR #376](https://github.com/MindWorkAI/AI-Studio/pull/376).
- Since March 2025: We have started developing the plugin system. There will be language plugins to offer AI Studio in other languages, configuration plugins to centrally manage certain providers and rules within an organization, and assistant plugins that allow anyone to develop their own assistants. We are using Lua as the plugin language:
- [x] ~~Plan & implement the base plugin system ([PR #322](https://github.com/MindWorkAI/AI-Studio/pull/322))~~
- [x] ~~Start the plugin system ([PR #372](https://github.com/MindWorkAI/AI-Studio/pull/372))~~
- [x] ~~Added hot-reload support for plugins ([PR #377](https://github.com/MindWorkAI/AI-Studio/pull/377), [PR #391](https://github.com/MindWorkAI/AI-Studio/pull/391))~~
- [ ] Add support for other languages (I18N) to AI Studio (~~[PR #381](https://github.com/MindWorkAI/AI-Studio/pull/381), [PR #400](https://github.com/MindWorkAI/AI-Studio/pull/400)~~, [PR #404](https://github.com/MindWorkAI/AI-Studio/pull/404))
- [ ] Add an I18N assistant to translate all AI Studio texts to a certain language & culture
- [ ] Provide MindWork AI Studio in German ([#31](https://github.com/MindWorkAI/Planning/issues/31))
- [ ] Add configuration plugins, which allow pre-defining some LLM providers in organizations
- [ ] Add an app store for plugins, showcasing community-contributed plugins from public GitHub and GitLab repositories. This will enable AI Studio users to discover, install, and update plugins directly within the platform.
- [ ] Add assistant plugins
Other News:
- October 2024: We've found the first two financial supporters. Huge thanks to `richard-stanton` and `peerschuett` for backing the project. Thanks for having the courage to be the first to support us.
- April 2025: We have two active financial supporters: Peer `peerschuett` and Dominic `donework`. Thank you very much for your support. MindWork AI reinvests these donations by passing them on to our AI Studio dependencies ([see here](https://github.com/orgs/MindWorkAI/sponsoring)). In the event that we receive large donations, we will first sign the app ([#56](https://github.com/MindWorkAI/Planning/issues/56)). In case we receive more donations, we will look for and pay staff to develop features for AI Studio.
- October 2024: The [German Aerospace Center (DLR)](https://en.wikipedia.org/wiki/German_Aerospace_Center) ([Website](https://www.dlr.de/en)) will use AI Studio at least within the scope of one project and will also contribute to its further development. This is great news.
- April 2025: The [German Aerospace Center (DLR)](https://en.wikipedia.org/wiki/German_Aerospace_Center) ([Website](https://www.dlr.de/en)) will use AI Studio at least within the scope of three projects and will also contribute to its further development. This is great news.
Features we have recently released:
- v0.9.40: Added support for the `o4` models from OpenAI. Also, we added Alibaba Cloud & Hugging Face as LLM providers.
- v0.9.39: Added the plugin system as a preview feature.
- v0.9.31: Added Helmholtz & GWDG as LLM providers. This is a huge improvement for many researchers out there who can use these providers for free. We added DeepSeek as a provider as well.
- v0.9.29: Added agents to support the RAG process (selecting the best data sources & validating retrieved data as part of the augmentation process)
- v0.9.26+: Added RAG for external data sources using our [ERI interface](https://mindworkai.org/#eri---external-retrieval-interface) as a preview feature.
- v0.9.25: Added [xAI](https://x.ai/) as a new provider. xAI provides their Grok models for generating content.
- v0.9.23: Added support for OpenAI `o` models (`o1`, `o1-mini`, `o3`, etc.); added also an [ERI](https://github.com/MindWorkAI/ERI) server coding assistant as a preview feature behind the RAG feature flag. Your own ERI server can be used to gain access to, e.g., your enterprise data from within AI Studio.
- v0.9.22: Added options for preview features; added embedding provider configuration for RAG (preview) and writer mode (experimental preview).
- v0.9.18: Added the new Anthropic Haiku model; added Groq and Google Gemini as provider options.
- v0.9.17: Added the new Anthropic model `claude-3-5-sonnet-20241022`.
- v0.9.16: Added workspace display options & improved the layout of the app window.
- v0.9.15: Added the bias-of-the-day assistant. Tells you about a cognitive bias every day.
- v0.9.13: You can use `ollama` providers secured with API keys.
## What is AI Studio?
@ -56,7 +68,19 @@ MindWork AI Studio is a free desktop app for macOS, Windows, and Linux. It provi
**Key advantages:**
- **Free of charge**: The app is free to use, both for personal and commercial purposes.
- **Independence**: You are not tied to any single provider. Instead, you can choose the provider that best suits their needs. Right now, we support OpenAI (GPT4o, o1, etc.), Mistral, Anthropic (Claude), Google Gemini, xAI (Grok), DeepSeek, and self-hosted models using [llama.cpp](https://github.com/ggerganov/llama.cpp), [ollama](https://github.com/ollama/ollama), [LM Studio](https://lmstudio.ai/), [Groq](https://groq.com/), or [Fireworks](https://fireworks.ai/). For scientists and employees of research institutions, we also support [Helmholtz](https://helmholtz.cloud/services/?serviceID=d7d5c597-a2f6-4bd1-b71e-4d6499d98570) and [GWDG](https://gwdg.de/services/application-services/ai-services/) AI services. These are available through federated logins like eduGAIN to all 18 Helmholtz Centers, the Max Planck Society, most German, and many international universities.
- **Independence**: You are not tied to any single provider. Instead, you can choose the providers that best suit your needs. Right now, we support:
- [OpenAI](https://openai.com/) (GPT4o, GPT4.1, o1, o3, o4, etc.)
- [Mistral](https://mistral.ai/)
- [Anthropic](https://www.anthropic.com/) (Claude)
- [Google Gemini](https://gemini.google.com)
- [xAI](https://x.ai/) (Grok)
- [DeepSeek](https://www.deepseek.com/en)
- [Alibaba Cloud](https://www.alibabacloud.com) (Qwen)
- [Hugging Face](https://huggingface.co/) using their [inference providers](https://huggingface.co/docs/inference-providers/index) such as Cerebras, Nebius, Sambanova, Novita, Hyperbolic, Together AI, Fireworks, Hugging Face
- Self-hosted models using [llama.cpp](https://github.com/ggerganov/llama.cpp), [ollama](https://github.com/ollama/ollama), [LM Studio](https://lmstudio.ai/)
- [Groq](https://groq.com/)
- [Fireworks](https://fireworks.ai/)
- For scientists and employees of research institutions, we also support [Helmholtz](https://helmholtz.cloud/services/?serviceID=d7d5c597-a2f6-4bd1-b71e-4d6499d98570) and [GWDG](https://gwdg.de/services/application-services/ai-services/) AI services. These are available through federated logins like eduGAIN to all 18 Helmholtz Centers, the Max Planck Society, most German, and many international universities.
- **Assistants**: You just want to quickly translate a text? AI Studio has so-called assistants for such and other tasks. No prompting is necessary when working with these assistants.
- **Unrestricted usage**: Unlike services like ChatGPT, which impose limits after intensive use, MindWork AI Studio offers unlimited usage through the providers' API.
- **Cost-effective**: You only pay for what you use, which can be cheaper than monthly subscription services like ChatGPT Plus, especially if used infrequently. But beware, here be dragons: For extremely intensive usage, the API costs can be significantly higher. Unfortunately, providers currently do not offer a way to display current costs in the app. Therefore, check your account with the respective provider to see how your costs are developing. When available, use prepaid and set a cost limit.

View File

@ -0,0 +1,3 @@
namespace Build.Commands;
public record AppVersion(string VersionText, int Major, int Minor, int Patch);

View File

@ -19,7 +19,7 @@ public sealed partial class UpdateMetadataCommands
return;
// Prepare the metadata for the next release:
await this.Prepare(action);
await this.PerformPrepare(action, true);
// Build once to allow the Rust compiler to read the changed metadata
// and to update all .NET artifacts:
@ -36,6 +36,21 @@ public sealed partial class UpdateMetadataCommands
// artifacts are already in place, and .NET knows the updated web assets, etc.:
await this.Build();
}
[Command("update-versions", Description = "The command will update the package versions in the metadata file")]
public async Task UpdateVersions()
{
if(!Environment.IsWorkingDirectoryValid())
return;
Console.WriteLine("==============================");
Console.WriteLine("- Update the main package versions ...");
await this.UpdateDotnetVersion();
await this.UpdateRustVersion();
await this.UpdateMudBlazorVersion();
await this.UpdateTauriVersion();
}
[Command("prepare", Description = "Prepare the metadata for the next release")]
public async Task Prepare(PrepareAction action)
@ -44,14 +59,31 @@ public sealed partial class UpdateMetadataCommands
return;
Console.WriteLine("==============================");
Console.Write("- Are you trying to prepare a new release? (y/n) ");
var userAnswer = Console.ReadLine();
if (userAnswer?.ToLowerInvariant() == "y")
{
Console.WriteLine("- Please use the 'release' command instead");
return;
}
await this.PerformPrepare(action, false);
}
private async Task PerformPrepare(PrepareAction action, bool internalCall)
{
if(internalCall)
Console.WriteLine("==============================");
Console.WriteLine("- Prepare the metadata for the next release ...");
var appVersion = await this.UpdateAppVersion(action);
if (!string.IsNullOrWhiteSpace(appVersion))
if (!string.IsNullOrWhiteSpace(appVersion.VersionText))
{
var buildNumber = await this.IncreaseBuildNumber();
var buildTime = await this.UpdateBuildTime();
await this.UpdateChangelog(buildNumber, appVersion, buildTime);
await this.UpdateChangelog(buildNumber, appVersion.VersionText, buildTime);
await this.CreateNextChangelog(buildNumber, appVersion);
await this.UpdateDotnetVersion();
await this.UpdateRustVersion();
await this.UpdateMudBlazorVersion();
@ -204,14 +236,59 @@ public sealed partial class UpdateMetadataCommands
}
}
private async Task CreateNextChangelog(int currentBuildNumber, AppVersion currentAppVersion)
{
Console.Write("- Create the next changelog ...");
var pathChangelogs = Path.Combine(Environment.GetAIStudioDirectory(), "wwwroot", "changelog");
var nextBuildNumber = currentBuildNumber + 1;
//
// We assume that most of the time, there will be patch releases:
//
var nextMajor = currentAppVersion.Major;
var nextMinor = currentAppVersion.Minor;
var nextPatch = currentAppVersion.Patch + 1;
var nextAppVersion = $"{nextMajor}.{nextMinor}.{nextPatch}";
var nextChangelogFilename = $"v{nextAppVersion}.md";
var nextChangelogFilePath = Path.Combine(pathChangelogs, nextChangelogFilename);
//
// Regarding the next build time: We assume that the next release will take place in one week from now.
// Thus, we check how many days this month has left. In the end, we want to predict the year and month
// for the next build. Day, hour, minute and second are all set to x.
//
var nextBuildMonth = (DateTime.Today + TimeSpan.FromDays(7)).Month;
var nextBuildYear = (DateTime.Today + TimeSpan.FromDays(7)).Year;
var nextBuildTimeString = $"{nextBuildYear}-{nextBuildMonth:00}-xx xx:xx UTC";
var changelogHeader = $"""
# v{nextAppVersion}, build {nextBuildNumber} ({nextBuildTimeString})
""";
if(!File.Exists(nextChangelogFilePath))
{
await File.WriteAllTextAsync(nextChangelogFilePath, changelogHeader, Environment.UTF8_NO_BOM);
Console.WriteLine($" done. Changelog '{nextChangelogFilename}' created.");
}
else
{
Console.WriteLine(" failed.");
Console.WriteLine("- Error: The changelog file already exists.");
}
}
private async Task UpdateChangelog(int buildNumber, string appVersion, string buildTime)
{
Console.Write("- Updating the in-app changelog list ...");
var pathChangelogs = Path.Combine(Environment.GetAIStudioDirectory(), "wwwroot", "changelog");
var expectedLogFilename = $"v{appVersion}.md";
var expectedLogFilePath = Path.Combine(pathChangelogs, expectedLogFilename);
if(!File.Exists(expectedLogFilePath))
{
Console.WriteLine(" failed.");
Console.WriteLine($"- Error: The changelog file '{expectedLogFilename}' does not exist.");
return;
}
@ -235,6 +312,7 @@ public sealed partial class UpdateMetadataCommands
changelogCode = changelogCode.Replace(CODE_START, updatedCode);
await File.WriteAllTextAsync(changelogCodePath, changelogCode, Environment.UTF8_NO_BOM);
Console.WriteLine(" done.");
}
private async Task UpdateArchitecture(RID rid)
@ -267,14 +345,14 @@ public sealed partial class UpdateMetadataCommands
await File.WriteAllLinesAsync(pathMetadata, lines, Environment.UTF8_NO_BOM);
}
private async Task<string> UpdateAppVersion(PrepareAction action)
private async Task<AppVersion> UpdateAppVersion(PrepareAction action)
{
const int APP_VERSION_INDEX = 0;
if (action == PrepareAction.NONE)
{
Console.WriteLine("- No action specified. Skipping app version update.");
return string.Empty;
return new(string.Empty, 0, 0, 0);
}
var pathMetadata = Environment.GetMetadataPath();
@ -308,7 +386,8 @@ public sealed partial class UpdateMetadataCommands
lines[APP_VERSION_INDEX] = updatedAppVersion;
await File.WriteAllLinesAsync(pathMetadata, lines, Environment.UTF8_NO_BOM);
return updatedAppVersion;
return new(updatedAppVersion, currentMajor, currentMinor, currentPatch);
}
private async Task UpdateLicenceYear(string licenceFilePath)

View File

@ -13,6 +13,7 @@ public partial class Changelog
public static readonly Log[] LOGS =
[
new (215, "v0.9.40, build 215 (2025-04-20 13:30 UTC)", "v0.9.40.md"),
new (214, "v0.9.39, build 214 (2025-04-07 17:39 UTC)", "v0.9.39.md"),
new (213, "v0.9.38, build 213 (2025-03-17 18:18 UTC)", "v0.9.38.md"),
new (212, "v0.9.37, build 212 (2025-03-16 20:32 UTC)", "v0.9.37.md"),

View File

@ -41,7 +41,7 @@
<RowTemplate>
<MudTd>@context.Num</MudTd>
<MudTd>@context.Name</MudTd>
<MudTd>@context.UsedLLMProvider</MudTd>
<MudTd>@context.UsedLLMProvider.ToName()</MudTd>
<MudTd>@this.GetEmbeddingProviderModelName(context)</MudTd>
<MudTd>

View File

@ -65,7 +65,7 @@ public partial class SettingsPanelProviders : SettingsPanelBase
{ x => x.IsSelfHosted, provider.IsSelfHosted },
{ x => x.IsEditing, true },
{ x => x.DataHost, provider.Host },
{ x => x.HfInstanceProviderId, provider.HFInstanceProvider },
{ x => x.HFInferenceProviderId, provider.HFInferenceProvider },
};
var dialogReference = await this.DialogService.ShowAsync<ProviderDialog>("Edit LLM Provider", dialogParameters, DialogOptions.FULLSCREEN);

View File

@ -77,7 +77,7 @@ public partial class Workspaces : ComponentBase
{
Depth = 0,
Branch = WorkspaceBranch.TEMPORARY_CHATS,
Text = "Temporary chats",
Text = "Disappearing Chats",
Icon = Icons.Material.Filled.Timer,
Expandable = true,
Path = "temp",

View File

@ -11,49 +11,58 @@
{
if (provider.ProvideEmbeddings())
{
<MudSelectItem Value="@provider">@provider</MudSelectItem>
<MudSelectItem Value="@provider">
@provider.ToName()
</MudSelectItem>
}
}
</MudSelect>
<MudButton Disabled="@(!this.DataLLMProvider.ShowRegisterButton())" Variant="Variant.Filled" Size="Size.Small" StartIcon="@Icons.Material.Filled.OpenInBrowser" Href="@this.DataLLMProvider.GetCreationURL()" Target="_blank">Create account</MudButton>
</MudStack>
@* ReSharper disable once CSharpWarnings::CS8974 *@
<MudTextField
T="string"
@bind-Text="@this.dataAPIKey"
Label="@this.APIKeyText"
Disabled="@(!this.DataLLMProvider.IsAPIKeyNeeded(this.DataHost))"
Class="mb-3"
Adornment="Adornment.Start"
AdornmentIcon="@Icons.Material.Filled.VpnKey"
AdornmentColor="Color.Info"
InputType="InputType.Password"
Validation="@this.providerValidation.ValidatingAPIKey"
/>
<MudTextField
T="string"
@bind-Text="@this.DataHostname"
Label="Hostname"
Disabled="@(!this.DataLLMProvider.IsHostnameNeeded())"
Class="mb-3"
Adornment="Adornment.Start"
AdornmentIcon="@Icons.Material.Filled.Dns"
AdornmentColor="Color.Info"
Validation="@this.providerValidation.ValidatingHostname"
UserAttributes="@SPELLCHECK_ATTRIBUTES"
/>
@if (this.DataLLMProvider.IsAPIKeyNeeded(this.DataHost))
{
@* ReSharper disable once CSharpWarnings::CS8974 *@
<MudTextField
T="string"
@bind-Text="@this.dataAPIKey"
Label="@this.APIKeyText"
Class="mb-3"
Adornment="Adornment.Start"
AdornmentIcon="@Icons.Material.Filled.VpnKey"
AdornmentColor="Color.Info"
InputType="InputType.Password"
Validation="@this.providerValidation.ValidatingAPIKey"/>
}
<MudSelect Disabled="@(!this.DataLLMProvider.IsHostNeeded())" @bind-Value="@this.DataHost" Label="Host" Class="mb-3" OpenIcon="@Icons.Material.Filled.ExpandMore" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingHost">
@foreach (Host host in Enum.GetValues(typeof(Host)))
{
if (host.AreEmbeddingsSupported())
@if (this.DataLLMProvider.IsHostnameNeeded())
{
<MudTextField
T="string"
@bind-Text="@this.DataHostname"
Label="Hostname"
Class="mb-3"
Adornment="Adornment.Start"
AdornmentIcon="@Icons.Material.Filled.Dns"
AdornmentColor="Color.Info"
Validation="@this.providerValidation.ValidatingHostname"
UserAttributes="@SPELLCHECK_ATTRIBUTES"/>
}
@if (this.DataLLMProvider.IsHostNeeded())
{
<MudSelect @bind-Value="@this.DataHost" Label="Host" Class="mb-3" OpenIcon="@Icons.Material.Filled.ExpandMore" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingHost">
@foreach (Host host in Enum.GetValues(typeof(Host)))
{
<MudSelectItem Value="@host">@host.Name()</MudSelectItem>
if (host.AreEmbeddingsSupported())
{
<MudSelectItem Value="@host">
@host.Name()
</MudSelectItem>
}
}
}
</MudSelect>
</MudSelect>
}
<MudStack Row="@true" AlignItems="AlignItems.Center">
@if (this.DataLLMProvider.IsEmbeddingModelProvidedManually(this.DataHost))

View File

@ -10,25 +10,29 @@
<MudSelect @bind-Value="@this.DataLLMProvider" Label="Provider" Class="mb-3" OpenIcon="@Icons.Material.Filled.AccountBalance" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingProvider">
@foreach (LLMProviders provider in Enum.GetValues(typeof(LLMProviders)))
{
<MudSelectItem Value="@provider">@provider.ToName()</MudSelectItem>
<MudSelectItem Value="@provider">
@provider.ToName()
</MudSelectItem>
}
</MudSelect>
<MudButton Disabled="@(!this.DataLLMProvider.ShowRegisterButton())" Variant="Variant.Filled" Size="Size.Small" StartIcon="@Icons.Material.Filled.OpenInBrowser" Href="@this.DataLLMProvider.GetCreationURL()" Target="_blank">Create account</MudButton>
</MudStack>
@* ReSharper disable once CSharpWarnings::CS8974 *@
<MudTextField
T="string"
@bind-Text="@this.dataAPIKey"
Label="@this.APIKeyText"
Disabled="@(!this.DataLLMProvider.IsAPIKeyNeeded(this.DataHost))"
Class="mb-3"
Adornment="Adornment.Start"
AdornmentIcon="@Icons.Material.Filled.VpnKey"
AdornmentColor="Color.Info"
InputType="InputType.Password"
Validation="@this.providerValidation.ValidatingAPIKey"
/>
@if (this.DataLLMProvider.IsAPIKeyNeeded(this.DataHost))
{
@* ReSharper disable once CSharpWarnings::CS8974 *@
<MudTextField
T="string"
@bind-Text="@this.dataAPIKey"
Label="@this.APIKeyText"
Class="mb-3"
Adornment="Adornment.Start"
AdornmentIcon="@Icons.Material.Filled.VpnKey"
AdornmentColor="Color.Info"
InputType="InputType.Password"
Immediate="true"
Validation="@this.providerValidation.ValidatingAPIKey"/>
}
@if (this.DataLLMProvider.IsHostnameNeeded())
{
@ -36,7 +40,6 @@
T="string"
@bind-Text="@this.DataHostname"
Label="Hostname"
Disabled="@(!this.DataLLMProvider.IsHostnameNeeded())"
Class="mb-3"
Adornment="Adornment.Start"
AdornmentIcon="@Icons.Material.Filled.Dns"
@ -47,53 +50,73 @@
@if (this.DataLLMProvider.IsHostNeeded())
{
<MudSelect Disabled="@(!this.DataLLMProvider.IsHostNeeded())" @bind-Value="@this.DataHost" Label="Host" Class="mb-3" OpenIcon="@Icons.Material.Filled.ExpandMore" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingHost">
<MudSelect @bind-Value="@this.DataHost" Label="Host" Class="mb-3" OpenIcon="@Icons.Material.Filled.ExpandMore" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingHost">
@foreach (Host host in Enum.GetValues(typeof(Host)))
{
<MudSelectItem Value="@host">@host.Name()</MudSelectItem>
<MudSelectItem Value="@host">
@host.Name()
</MudSelectItem>
}
</MudSelect>
}
@if (this.DataLLMProvider.IsHFInstanceProviderNeeded())
{
<MudSelect Disabled="@(!this.DataLLMProvider.IsHFInstanceProviderNeeded())" @bind-Value="@this.HfInstanceProviderId" Label="HF Instance Provider" Class="mb-3" OpenIcon="@Icons.Material.Filled.Dns" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingHFInstanceProvider">
@foreach (HFInstanceProvider instanceProvider in Enum.GetValues(typeof(HFInstanceProvider)))
<MudSelect @bind-Value="@this.HFInferenceProviderId" Label="Hugging Face Inference Provider" Class="mb-3" OpenIcon="@Icons.Material.Filled.Dns" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingHFInstanceProvider">
@foreach (HFInferenceProvider inferenceProvider in Enum.GetValues(typeof(HFInferenceProvider)))
{
<MudSelectItem Value="@instanceProvider">@instanceProvider.ToName()</MudSelectItem>
<MudSelectItem Value="@inferenceProvider">
@inferenceProvider.ToName()
</MudSelectItem>
}
</MudSelect>
<MudJustifiedText Class="mb-3"> Please double-check if your model name matches the curl specifications provided by the instance provider. If it doesn't, you might get a <b>Not Found</b> error when trying to use the model. Here's a <MudLink Href="https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct?inference_api=true&inference_provider=novita&language=sh" Target="_blank">curl example</MudLink>.</MudJustifiedText>
<MudJustifiedText Class="mb-3"> Please double-check if your model name matches the curl specifications provided by the inference provider. If it doesn't, you might get a <b>Not Found</b> error when trying to use the model. Here's a <MudLink Href="https://huggingface.co/meta-llama/Llama-3.1-8B-Instruct?inference_api=true&inference_provider=novita&language=sh" Target="_blank">curl example</MudLink>.</MudJustifiedText>
}
<MudStack Row="@true" AlignItems="AlignItems.Center">
@if (this.DataLLMProvider.IsLLMModelProvidedManually())
{
<MudButton Variant="Variant.Filled" Size="Size.Small" StartIcon="@Icons.Material.Filled.OpenInBrowser" Href="@this.DataLLMProvider.GetModelsOverviewURL(this.HfInstanceProviderId)" Target="_blank">Show available models</MudButton>
<MudTextField
T="string"
@bind-Text="@this.dataManuallyModel"
Label="Model"
Class="mb-3"
Adornment="Adornment.Start"
AdornmentIcon="@Icons.Material.Filled.FaceRetouchingNatural"
AdornmentColor="Color.Info"
Validation="@this.ValidateManuallyModel"
UserAttributes="@SPELLCHECK_ATTRIBUTES"
/>
}
else
{
<MudButton Disabled="@(!this.DataLLMProvider.CanLoadModels(this.DataHost, this.dataAPIKey))" Variant="Variant.Filled" Size="Size.Small" StartIcon="@Icons.Material.Filled.Refresh" OnClick="this.ReloadModels">Load</MudButton>
<MudSelect Disabled="@this.IsNoneProvider" @bind-Value="@this.DataModel" Label="Model" Class="mb-3" OpenIcon="@Icons.Material.Filled.FaceRetouchingNatural" AdornmentColor="Color.Info" Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingModel">
@foreach (var model in this.availableModels)
<MudField FullWidth="true" Label="Model selection" Variant="Variant.Outlined" Class="mb-3">
<MudStack Row="@true" AlignItems="AlignItems.Center" StretchItems="StretchItems.End">
@if (this.DataLLMProvider.IsLLMModelProvidedManually())
{
<MudButton Variant="Variant.Filled" Size="Size.Small" StartIcon="@Icons.Material.Filled.OpenInBrowser" Href="@this.DataLLMProvider.GetModelsOverviewURL(this.HFInferenceProviderId)" Target="_blank">
Show available models
</MudButton>
<MudTextField
T="string"
@bind-Text="@this.dataManuallyModel"
Label="Model"
Adornment="Adornment.Start"
AdornmentIcon="@Icons.Material.Filled.FaceRetouchingNatural"
AdornmentColor="Color.Info"
Validation="@this.ValidateManuallyModel"
UserAttributes="@SPELLCHECK_ATTRIBUTES"
/>
}
else
{
<MudButton Disabled="@(!this.DataLLMProvider.CanLoadModels(this.DataHost, this.dataAPIKey))" Variant="Variant.Filled" Size="Size.Small" StartIcon="@Icons.Material.Filled.Refresh" OnClick="this.ReloadModels">
Load models
</MudButton>
@if(this.availableModels.Count is 0)
{
<MudSelectItem Value="@model">@model</MudSelectItem>
<MudText Typo="Typo.body1">
No models loaded or available.
</MudText>
}
</MudSelect>
}
</MudStack>
else
{
<MudSelect @bind-Value="@this.DataModel"
OpenIcon="@Icons.Material.Filled.FaceRetouchingNatural" AdornmentColor="Color.Info"
Adornment="Adornment.Start" Validation="@this.providerValidation.ValidatingModel">
@foreach (var model in this.availableModels)
{
<MudSelectItem Value="@model">@model</MudSelectItem>
}
</MudSelect>
}
}
</MudStack>
</MudField>
@* ReSharper disable once CSharpWarnings::CS8974 *@
<MudTextField

View File

@ -52,7 +52,7 @@ public partial class ProviderDialog : ComponentBase, ISecretId
/// The HFInferenceProvider to use, e.g., CEREBRAS.
/// </summary>
[Parameter]
public HFInstanceProvider HfInstanceProviderId { get; set; } = HFInstanceProvider.NONE;
public HFInferenceProvider HFInferenceProviderId { get; set; } = HFInferenceProvider.NONE;
/// <summary>
/// Is this provider self-hosted?
@ -138,7 +138,7 @@ public partial class ProviderDialog : ComponentBase, ISecretId
IsSelfHosted = this.DataLLMProvider is LLMProviders.SELF_HOSTED,
Hostname = cleanedHostname.EndsWith('/') ? cleanedHostname[..^1] : cleanedHostname,
Host = this.DataHost,
HFInstanceProvider = this.HfInstanceProviderId,
HFInferenceProvider = this.HFInferenceProviderId,
};
}
@ -272,6 +272,4 @@ public partial class ProviderDialog : ComponentBase, ISecretId
LLMProviders.SELF_HOSTED => "(Optional) API Key",
_ => "API Key",
};
private bool IsNoneProvider => this.DataLLMProvider is LLMProviders.NONE;
}

View File

@ -155,7 +155,6 @@ public partial class MainLayout : LayoutComponentBase, IMessageBusReceiver, ILan
};
config.Action = T("Show details");
config.ActionVariant = Variant.Filled;
config.ActionColor = Color.Dark;
});
}
@ -188,17 +187,18 @@ public partial class MainLayout : LayoutComponentBase, IMessageBusReceiver, ILan
_ = Task.Run(async () =>
{
// Set up the plugin system:
PluginFactory.Setup();
// Ensure that all internal plugins are present:
await PluginFactory.EnsureInternalPlugins();
// Load (but not start) all plugins, without waiting for them:
var pluginLoadingTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(5));
await PluginFactory.LoadAll(pluginLoadingTimeout.Token);
// Set up hot reloading for plugins:
PluginFactory.SetUpHotReloading();
if (PluginFactory.Setup())
{
// Ensure that all internal plugins are present:
await PluginFactory.EnsureInternalPlugins();
// Load (but not start) all plugins, without waiting for them:
var pluginLoadingTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(5));
await PluginFactory.LoadAll(pluginLoadingTimeout.Token);
// Set up hot reloading for plugins:
PluginFactory.SetUpHotReloading();
}
});
}

View File

@ -48,10 +48,10 @@
<ItemGroup>
<PackageReference Include="CodeBeam.MudBlazor.Extensions" Version="8.0.0" />
<PackageReference Include="HtmlAgilityPack" Version="1.12.0" />
<PackageReference Include="Microsoft.Extensions.FileProviders.Embedded" Version="9.0.3" />
<PackageReference Include="HtmlAgilityPack" Version="1.12.1" />
<PackageReference Include="Microsoft.Extensions.FileProviders.Embedded" Version="9.0.4" />
<PackageReference Include="MudBlazor" Version="8.5.1" />
<PackageReference Include="MudBlazor.Markdown" Version="8.0.0" />
<PackageReference Include="MudBlazor.Markdown" Version="8.5.1" />
<PackageReference Include="ReverseMarkdown" Version="4.6.0" />
<PackageReference Include="LuaCSharp" Version="0.4.2" />
</ItemGroup>

View File

@ -31,7 +31,7 @@ public partial class About : MSGComponentBase
private static string TauriVersion => $"Tauri: v{META_DATA.TauriVersion}";
private string OSLanguage => $"{this.T("User-language provided by the OS")}: '{this.osLanguage}'";
private string OSLanguage => $"{T("User-language provided by the OS")}: '{this.osLanguage}'";
private string VersionRust => $"{T("Used Rust compiler")}: v{META_DATA.RustVersion}";

View File

@ -11,7 +11,7 @@
}
else
{
@(T("Short-Term Chat"))
@(T("Disappearing Chat"))
}
</MudText>

View File

@ -24,7 +24,7 @@ public partial class Home : MSGComponentBase
this.itemsAdvantages = [
new(T("Free of charge"), T("The app is free to use, both for personal and commercial purposes.")),
new(T("Independence"), T("You are not tied to any single provider. Instead, you might choose the provider that best suits your needs. Right now, we support OpenAI (GPT4o, o1, etc.), Mistral, Anthropic (Claude), Google Gemini, xAI (Grok), DeepSeek, and self-hosted models using llama.cpp, ollama, LM Studio, Groq, or Fireworks. For scientists and employees of research institutions, we also support Helmholtz and GWDG AI services. These are available through federated logins like eduGAIN to all 18 Helmholtz Centers, the Max Planck Society, most German, and many international universities.")),
new(T("Independence"), T("You are not tied to any single provider. Instead, you might choose the provider that best suits your needs. Right now, we support OpenAI (GPT4o, o1, etc.), Mistral, Anthropic (Claude), Google Gemini, xAI (Grok), DeepSeek, Alibaba Cloud (Qwen), Hugging Face, and self-hosted models using llama.cpp, ollama, LM Studio, Groq, or Fireworks. For scientists and employees of research institutions, we also support Helmholtz and GWDG AI services. These are available through federated logins like eduGAIN to all 18 Helmholtz Centers, the Max Planck Society, most German, and many international universities.")),
new(T("Assistants"), T("You just want to quickly translate a text? AI Studio has so-called assistants for such and other tasks. No prompting is necessary when working with these assistants.")),
new(T("Unrestricted usage"), T("Unlike services like ChatGPT, which impose limits after intensive use, MindWork AI Studio offers unlimited usage through the providers API.")),
new(T("Cost-effective"), T("You only pay for what you use, which can be cheaper than monthly subscription services like ChatGPT Plus, especially if used infrequently. But beware, here be dragons: For extremely intensive usage, the API costs can be significantly higher. Unfortunately, providers currently do not offer a way to display current costs in the app. Therefore, check your account with the respective provider to see how your costs are developing. When available, use prepaid and set a cost limit.")),
@ -44,7 +44,7 @@ public partial class Home : MSGComponentBase
using var response = await this.HttpClient.GetAsync($"changelog/{latest.Filename}");
this.LastChangeContent = await response.Content.ReadAsStringAsync();
}
private const string QUICK_START_GUIDE =
"""
Ready to dive in and get started with MindWork AI Studio? This quick start guide will help you set up everything you need to start using the app.

View File

@ -130,7 +130,7 @@ internal sealed class Program
.AddHubOptions(options =>
{
options.MaximumReceiveMessageSize = null;
options.ClientTimeoutInterval = TimeSpan.FromSeconds(1_200);
options.ClientTimeoutInterval = TimeSpan.FromDays(14);
options.HandshakeTimeout = TimeSpan.FromSeconds(30);
});

View File

@ -98,7 +98,7 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela
return [];
return modelResponse.Models.Where(model =>
model.Name.StartsWith("models/gemini-", StringComparison.InvariantCultureIgnoreCase))
model.Name.StartsWith("models/gemini-", StringComparison.OrdinalIgnoreCase))
.Select(n => new Provider.Model(n.Name.Replace("models/", string.Empty), n.DisplayName));
}

View File

@ -136,7 +136,8 @@ public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/o
var modelResponse = await response.Content.ReadFromJsonAsync<ModelsResponse>(token);
return modelResponse.Data.Where(n =>
!n.Id.StartsWith("whisper-", StringComparison.InvariantCultureIgnoreCase) &&
!n.Id.StartsWith("distil-", StringComparison.InvariantCultureIgnoreCase));
!n.Id.StartsWith("whisper-", StringComparison.OrdinalIgnoreCase) &&
!n.Id.StartsWith("distil-", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("-tts", StringComparison.OrdinalIgnoreCase));
}
}

View File

@ -1,9 +1,9 @@
namespace AIStudio.Provider.HuggingFace;
/// <summary>
/// Enum for instance providers that Hugging Face supports.
/// Enum for inference providers that Hugging Face supports.
/// </summary>
public enum HFInstanceProvider
public enum HFInferenceProvider
{
NONE,

View File

@ -0,0 +1,43 @@
namespace AIStudio.Provider.HuggingFace;
/// <summary>
/// Extension methods mapping a <see cref="HFInferenceProvider"/> to its
/// Hugging Face router endpoint path, its URL query id, and a human-readable name.
/// </summary>
public static class HFInferenceProviderExtensions
{
    /// <summary>
    /// Returns the relative endpoint path (appended to the Hugging Face router base URL)
    /// for the given inference provider.
    /// </summary>
    /// <param name="provider">The inference provider to resolve.</param>
    /// <param name="model">The model; only used by the Hugging Face Inference API, where the model id is part of the path.</param>
    /// <returns>The relative endpoint path, or an empty string for unknown providers.</returns>
    public static string Endpoints(this HFInferenceProvider provider, Model model) => provider switch
    {
        HFInferenceProvider.CEREBRAS => "cerebras/v1/",
        HFInferenceProvider.NEBIUS_AI_STUDIO => "nebius/v1/",
        HFInferenceProvider.SAMBANOVA => "sambanova/v1/",
        HFInferenceProvider.NOVITA => "novita/v3/openai/",
        HFInferenceProvider.HYPERBOLIC => "hyperbolic/v1/",
        HFInferenceProvider.TOGETHER_AI => "together/v1/",
        HFInferenceProvider.FIREWORKS => "fireworks-ai/inference/v1/",

        // String interpolation invokes ToString() implicitly; the explicit call was redundant.
        HFInferenceProvider.HF_INFERENCE_API => $"hf-inference/models/{model}/v1/",
        _ => string.Empty,
    };

    /// <summary>
    /// Returns the provider id as used in Hugging Face URLs, e.g. the
    /// <c>inference_provider</c> query parameter of the model overview page.
    /// </summary>
    /// <param name="provider">The inference provider to resolve.</param>
    /// <returns>The URL id, or an empty string for unknown providers.</returns>
    public static string EndpointsId(this HFInferenceProvider provider) => provider switch
    {
        HFInferenceProvider.CEREBRAS => "cerebras",
        HFInferenceProvider.NEBIUS_AI_STUDIO => "nebius",
        HFInferenceProvider.SAMBANOVA => "sambanova",
        HFInferenceProvider.NOVITA => "novita",
        HFInferenceProvider.HYPERBOLIC => "hyperbolic",
        HFInferenceProvider.TOGETHER_AI => "together",
        HFInferenceProvider.FIREWORKS => "fireworks",
        HFInferenceProvider.HF_INFERENCE_API => "hf-inference",
        _ => string.Empty,
    };

    /// <summary>
    /// Returns the human-readable display name of the given inference provider.
    /// </summary>
    /// <param name="provider">The inference provider to resolve.</param>
    /// <returns>The display name, or an empty string for unknown providers.</returns>
    public static string ToName(this HFInferenceProvider provider) => provider switch
    {
        HFInferenceProvider.CEREBRAS => "Cerebras",
        HFInferenceProvider.NEBIUS_AI_STUDIO => "Nebius AI Studio",
        HFInferenceProvider.SAMBANOVA => "Sambanova",
        HFInferenceProvider.NOVITA => "Novita",
        HFInferenceProvider.HYPERBOLIC => "Hyperbolic",
        HFInferenceProvider.TOGETHER_AI => "Together AI",
        HFInferenceProvider.FIREWORKS => "Fireworks AI",
        HFInferenceProvider.HF_INFERENCE_API => "Hugging Face Inference API",
        _ => string.Empty,
    };
}

View File

@ -1,43 +0,0 @@
namespace AIStudio.Provider.HuggingFace;
public static class HFInstanceProviderExtensions
{
public static string Endpoints(this HFInstanceProvider provider, Model model) => provider switch
{
HFInstanceProvider.CEREBRAS => "cerebras/v1/",
HFInstanceProvider.NEBIUS_AI_STUDIO => "nebius/v1/",
HFInstanceProvider.SAMBANOVA => "sambanova/v1/",
HFInstanceProvider.NOVITA => "novita/v3/openai/",
HFInstanceProvider.HYPERBOLIC => "hyperbolic/v1/",
HFInstanceProvider.TOGETHER_AI => "together/v1/",
HFInstanceProvider.FIREWORKS => "fireworks-ai/inference/v1/",
HFInstanceProvider.HF_INFERENCE_API => $"hf-inference/models/{model.ToString()}/v1/",
_ => string.Empty,
};
public static string EndpointsId(this HFInstanceProvider provider) => provider switch
{
HFInstanceProvider.CEREBRAS => "cerebras",
HFInstanceProvider.NEBIUS_AI_STUDIO => "nebius",
HFInstanceProvider.SAMBANOVA => "sambanova",
HFInstanceProvider.NOVITA => "novita",
HFInstanceProvider.HYPERBOLIC => "hyperbolic",
HFInstanceProvider.TOGETHER_AI => "together",
HFInstanceProvider.FIREWORKS => "fireworks",
HFInstanceProvider.HF_INFERENCE_API => "hf-inference",
_ => string.Empty,
};
public static string ToName(this HFInstanceProvider provider) => provider switch
{
HFInstanceProvider.CEREBRAS => "Cerebras",
HFInstanceProvider.NEBIUS_AI_STUDIO => "Nebius AI Studio",
HFInstanceProvider.SAMBANOVA => "Sambanova",
HFInstanceProvider.NOVITA => "Novita",
HFInstanceProvider.HYPERBOLIC => "Hyperbolic",
HFInstanceProvider.TOGETHER_AI => "Together AI",
HFInstanceProvider.FIREWORKS => "Fireworks AI",
HFInstanceProvider.HF_INFERENCE_API => "Hugging Face Inference API",
_ => string.Empty,
};
}

View File

@ -11,9 +11,9 @@ namespace AIStudio.Provider.HuggingFace;
public sealed class ProviderHuggingFace : BaseProvider
{
public ProviderHuggingFace(ILogger logger, HFInstanceProvider hfProvider, Model model) : base($"https://router.huggingface.co/{hfProvider.Endpoints(model)}", logger)
public ProviderHuggingFace(ILogger logger, HFInferenceProvider hfProvider, Model model) : base($"https://router.huggingface.co/{hfProvider.Endpoints(model)}", logger)
{
logger.LogInformation($"We use the instance provider '{hfProvider}'. Thus we use the base URL 'https://router.huggingface.co/{hfProvider.Endpoints(model)}'.");
logger.LogInformation($"We use the inferende provider '{hfProvider}'. Thus we use the base URL 'https://router.huggingface.co/{hfProvider.Endpoints(model)}'.");
}
#region Implementation of IProvider

View File

@ -135,7 +135,7 @@ public static class LLMProvidersExtensions
/// <returns>The provider instance.</returns>
public static IProvider CreateProvider(this AIStudio.Settings.Provider providerSettings, ILogger logger)
{
return providerSettings.UsedLLMProvider.CreateProvider(providerSettings.InstanceName, providerSettings.Host, providerSettings.Hostname, providerSettings.Model, providerSettings.HFInstanceProvider ,logger);
return providerSettings.UsedLLMProvider.CreateProvider(providerSettings.InstanceName, providerSettings.Host, providerSettings.Hostname, providerSettings.Model, providerSettings.HFInferenceProvider ,logger);
}
/// <summary>
@ -146,10 +146,10 @@ public static class LLMProvidersExtensions
/// <returns>The provider instance.</returns>
public static IProvider CreateProvider(this EmbeddingProvider embeddingProviderSettings, ILogger logger)
{
return embeddingProviderSettings.UsedLLMProvider.CreateProvider(embeddingProviderSettings.Name, embeddingProviderSettings.Host, embeddingProviderSettings.Hostname, embeddingProviderSettings.Model, HFInstanceProvider.NONE,logger);
return embeddingProviderSettings.UsedLLMProvider.CreateProvider(embeddingProviderSettings.Name, embeddingProviderSettings.Host, embeddingProviderSettings.Hostname, embeddingProviderSettings.Model, HFInferenceProvider.NONE,logger);
}
private static IProvider CreateProvider(this LLMProviders provider, string instanceName, Host host, string hostname, Model model, HFInstanceProvider instanceProvider , ILogger logger)
private static IProvider CreateProvider(this LLMProviders provider, string instanceName, Host host, string hostname, Model model, HFInferenceProvider inferenceProvider , ILogger logger)
{
try
{
@ -165,7 +165,7 @@ public static class LLMProvidersExtensions
LLMProviders.GROQ => new ProviderGroq(logger) { InstanceName = instanceName },
LLMProviders.FIREWORKS => new ProviderFireworks(logger) { InstanceName = instanceName },
LLMProviders.HUGGINGFACE => new ProviderHuggingFace(logger, instanceProvider, model) { InstanceName = instanceName },
LLMProviders.HUGGINGFACE => new ProviderHuggingFace(logger, inferenceProvider, model) { InstanceName = instanceName },
LLMProviders.SELF_HOSTED => new ProviderSelfHosted(logger, host, hostname) { InstanceName = instanceName },
@ -234,10 +234,10 @@ public static class LLMProvidersExtensions
_ => false,
};
public static string GetModelsOverviewURL(this LLMProviders provider, HFInstanceProvider instanceProvider) => provider switch
public static string GetModelsOverviewURL(this LLMProviders provider, HFInferenceProvider inferenceProvider) => provider switch
{
LLMProviders.FIREWORKS => "https://fireworks.ai/models?show=Serverless",
LLMProviders.HUGGINGFACE => $"https://huggingface.co/models?inference_provider={instanceProvider.EndpointsId()}",
LLMProviders.HUGGINGFACE => $"https://huggingface.co/models?inference_provider={inferenceProvider.EndpointsId()}",
_ => string.Empty,
};

View File

@ -99,8 +99,9 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api.
return [];
return modelResponse.Data.Where(n =>
!n.Id.StartsWith("code", StringComparison.InvariantCulture) &&
!n.Id.Contains("embed", StringComparison.InvariantCulture))
!n.Id.StartsWith("code", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("embed", StringComparison.OrdinalIgnoreCase) &&
!n.Id.Contains("moderation", StringComparison.OrdinalIgnoreCase))
.Select(n => new Provider.Model(n.Id, null));
}

View File

@ -120,15 +120,20 @@ public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.o
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels(["gpt-", "o1-", "o3-", "o4-"], token, apiKeyProvisional);
var models = await this.LoadModels(["gpt-", "o1-", "o3-", "o4-"], token, apiKeyProvisional);
return models.Where(model => !model.Id.Contains("image", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("realtime", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("audio", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("tts", StringComparison.OrdinalIgnoreCase) &&
!model.Id.Contains("transcribe", StringComparison.OrdinalIgnoreCase));
}
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels(["dall-e-"], token, apiKeyProvisional);
return this.LoadModels(["dall-e-", "gpt-image"], token, apiKeyProvisional);
}
/// <inheritdoc />

View File

@ -93,9 +93,10 @@ public sealed class ProviderX(ILogger logger) : BaseProvider("https://api.x.ai/v
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
/// <inheritdoc />
public override Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
public override async Task<IEnumerable<Model>> GetTextModels(string? apiKeyProvisional = null, CancellationToken token = default)
{
return this.LoadModels(["grok-"], token, apiKeyProvisional);
var models = await this.LoadModels(["grok-"], token, apiKeyProvisional);
return models.Where(n => !n.Id.Contains("-image", StringComparison.OrdinalIgnoreCase));
}
/// <inheritdoc />

View File

@ -25,7 +25,7 @@ public readonly record struct Provider(
bool IsSelfHosted = false,
string Hostname = "http://localhost:1234",
Host Host = Host.NONE,
HFInstanceProvider HFInstanceProvider = HFInstanceProvider.NONE) : ISecretId
HFInferenceProvider HFInferenceProvider = HFInferenceProvider.NONE) : ISecretId
{
#region Overrides of ValueType

View File

@ -2,6 +2,8 @@ namespace AIStudio.Tools.PluginSystem;
public static partial class PluginFactory
{
private static readonly SemaphoreSlim HOT_RELOAD_SEMAPHORE = new(1, 1);
public static void SetUpHotReloading()
{
if (!IS_INITIALIZED)
@ -20,7 +22,13 @@ public static partial class PluginFactory
HOT_RELOAD_WATCHER.Filter = "*.lua";
HOT_RELOAD_WATCHER.Changed += async (_, args) =>
{
LOG.LogInformation($"File changed: {args.FullPath}");
if (!await HOT_RELOAD_SEMAPHORE.WaitAsync(0))
{
LOG.LogInformation($"File changed ({args.ChangeType}): {args.FullPath}. Already processing another change.");
return;
}
LOG.LogInformation($"File changed ({args.ChangeType}): {args.FullPath}. Reloading plugins...");
await LoadAll();
await messageBus.SendMessage<bool>(null, Event.PLUGINS_RELOADED);
};

View File

@ -0,0 +1,152 @@
using System.Text;
using Lua;
using Lua.Standard;
namespace AIStudio.Tools.PluginSystem;
public static partial class PluginFactory
{
    // Backing list for AvailablePlugins; cleared and repopulated on every LoadAll() run.
    private static readonly List<IAvailablePlugin> AVAILABLE_PLUGINS = [];

    // Guards LoadAll() against concurrent invocations, e.g. multiple file-system
    // change events triggering reloads at the same time. Initialized to (1, 1),
    // i.e. a single slot.
    private static readonly SemaphoreSlim PLUGIN_LOAD_SEMAPHORE = new(1, 1);

    /// <summary>
    /// A list of all available plugins.
    /// </summary>
    public static IReadOnlyCollection<IPluginMetadata> AvailablePlugins => AVAILABLE_PLUGINS;

    /// <summary>
    /// Try to load all plugins from the plugins directory.
    /// </summary>
    /// <remarks>
    /// Loading plugins means:<br/>
    /// - Parsing and checking the plugin code<br/>
    /// - Check for forbidden plugins<br/>
    /// - Creating a new instance of the allowed plugin<br/>
    /// - Read the plugin metadata<br/>
    /// <br/>
    /// Loading a plugin does not mean to start the plugin, though.
    /// </remarks>
    /// <param name="cancellationToken">Stops enumerating further plugin files when cancelled.</param>
    public static async Task LoadAll(CancellationToken cancellationToken = default)
    {
        if (!IS_INITIALIZED)
        {
            LOG.LogError("PluginFactory is not initialized. Please call Setup() before using it.");
            return;
        }

        // WaitAsync(0) does not block: when another load is already in progress,
        // this call returns false and the entire reload is skipped rather than queued.
        if (!await PLUGIN_LOAD_SEMAPHORE.WaitAsync(0, cancellationToken))
            return;

        try
        {
            LOG.LogInformation("Start loading plugins.");
            if (!Directory.Exists(PLUGINS_ROOT))
            {
                LOG.LogInformation("No plugins found.");
                return;
            }

            // Previous results are discarded; a reload rebuilds the list from disk.
            AVAILABLE_PLUGINS.Clear();

            //
            // The easiest way to load all plugins is to find all `plugin.lua` files and load them.
            // By convention, each plugin is enforced to have a `plugin.lua` file.
            //
            var pluginMainFiles = Directory.EnumerateFiles(PLUGINS_ROOT, "plugin.lua", SearchOption.AllDirectories);
            foreach (var pluginMainFile in pluginMainFiles)
            {
                if (cancellationToken.IsCancellationRequested)
                    break;

                LOG.LogInformation($"Try to load plugin: {pluginMainFile}");
                var code = await File.ReadAllTextAsync(pluginMainFile, Encoding.UTF8, cancellationToken);
                var pluginPath = Path.GetDirectoryName(pluginMainFile)!;
                var plugin = await Load(pluginPath, code, cancellationToken);
                switch (plugin)
                {
                    // A NoPlugin with issues: report the first issue as the reason.
                    case NoPlugin noPlugin when noPlugin.Issues.Any():
                        LOG.LogError($"Was not able to load plugin: '{pluginMainFile}'. Reason: {noPlugin.Issues.First()}");
                        continue;

                    // A NoPlugin without any recorded issue:
                    case NoPlugin:
                        LOG.LogError($"Was not able to load plugin: '{pluginMainFile}'. Reason: Unknown.");
                        continue;

                    // Parsed, but not a valid AI Studio plugin; in DEBUG builds, log every issue.
                    case { IsValid: false }:
                        LOG.LogError($"Was not able to load plugin '{pluginMainFile}', because the Lua code is not a valid AI Studio plugin. There are {plugin.Issues.Count()} issues to fix. First issue is: {plugin.Issues.FirstOrDefault()}");
                        #if DEBUG
                        foreach (var pluginIssue in plugin.Issues)
                            LOG.LogError($"Plugin issue: {pluginIssue}");
                        #endif
                        continue;

                    // Valid but unmaintained: warn, then fall through and load it anyway.
                    case { IsMaintained: false }:
                        LOG.LogWarning($"The plugin '{pluginMainFile}' is not maintained anymore. Please consider to disable it.");
                        break;
                }

                LOG.LogInformation($"Successfully loaded plugin: '{pluginMainFile}' (Id='{plugin.Id}', Type='{plugin.Type}', Name='{plugin.Name}', Version='{plugin.Version}', Authors='{string.Join(", ", plugin.Authors)}')");
                AVAILABLE_PLUGINS.Add(new PluginMetadata(plugin, pluginPath));
            }

            // Start or restart all plugins:
            await RestartAllPlugins(cancellationToken);
        }
        finally
        {
            // Always release the single slot, even on early return or exception.
            PLUGIN_LOAD_SEMAPHORE.Release();
            LOG.LogInformation("Finished loading plugins.");
        }
    }

    /// <summary>
    /// Parses and executes the given Lua code and, when it declares a valid plugin TYPE,
    /// creates the corresponding plugin instance.
    /// </summary>
    /// <param name="pluginPath">The plugin's directory; used as the Lua module root and to detect internal plugins.</param>
    /// <param name="code">The Lua source code of the plugin's main file.</param>
    /// <param name="cancellationToken">Cancels the Lua execution.</param>
    /// <returns>The loaded plugin, or a NoPlugin describing why loading failed.</returns>
    private static async Task<PluginBase> Load(string pluginPath, string code, CancellationToken cancellationToken = default)
    {
        // Forbidden plugins are rejected before any Lua code is executed:
        if(ForbiddenPlugins.Check(code) is { IsForbidden: true } forbiddenState)
            return new NoPlugin($"This plugin is forbidden: {forbiddenState.Message}");

        var state = LuaState.Create();

        // Add the module loader so that the plugin can load other Lua modules:
        state.ModuleLoader = new PluginLoader(pluginPath);

        // Add some useful libraries:
        state.OpenModuleLibrary();
        state.OpenStringLibrary();
        state.OpenTableLibrary();
        state.OpenMathLibrary();
        state.OpenBitwiseLibrary();
        state.OpenCoroutineLibrary();

        try
        {
            await state.DoStringAsync(code, cancellationToken: cancellationToken);
        }
        catch (LuaParseException e)
        {
            return new NoPlugin($"Was not able to parse the plugin: {e.Message}");
        }
        catch (LuaRuntimeException e)
        {
            return new NoPlugin($"Was not able to run the plugin: {e.Message}");
        }

        // The plugin must declare a global TYPE string matching the PluginType enum:
        if (!state.Environment["TYPE"].TryRead<string>(out var typeText))
            return new NoPlugin("TYPE does not exist or is not a valid string.");

        if (!Enum.TryParse<PluginType>(typeText, out var type))
            return new NoPlugin($"TYPE is not a valid plugin type. Valid types are: {CommonTools.GetAllEnumValues<PluginType>()}");

        if(type is PluginType.NONE)
            return new NoPlugin($"TYPE is not a valid plugin type. Valid types are: {CommonTools.GetAllEnumValues<PluginType>()}");

        // Plugins shipped below the internal plugins root are treated as internal:
        var isInternal = pluginPath.StartsWith(INTERNAL_PLUGINS_ROOT, StringComparison.OrdinalIgnoreCase);
        return type switch
        {
            PluginType.LANGUAGE => new PluginLanguage(isInternal, state, type),
            _ => new NoPlugin("This plugin type is not supported yet. Please try again with a future version of AI Studio.")
        };
    }
}

View File

@ -0,0 +1,101 @@
using System.Text;
namespace AIStudio.Tools.PluginSystem;
public static partial class PluginFactory
{
    // Backing list for RunningPlugins; rebuilt on every RestartAllPlugins() run.
    private static readonly List<PluginBase> RUNNING_PLUGINS = [];

    /// <summary>
    /// A list of all running plugins.
    /// </summary>
    public static IReadOnlyCollection<PluginBase> RunningPlugins => RUNNING_PLUGINS;

    /// <summary>
    /// Starts (or restarts) all available plugins. The base language plugin is
    /// started first, because other language plugins fall back to it for missing keys;
    /// if it cannot be started, no other plugin is started either.
    /// </summary>
    /// <param name="cancellationToken">Stops starting further plugins when cancelled.</param>
    private static async Task RestartAllPlugins(CancellationToken cancellationToken = default)
    {
        LOG.LogInformation("Try to start or restart all plugins.");
        RUNNING_PLUGINS.Clear();

        //
        // Get the base language plugin. This is the plugin that will be used to fill in missing keys.
        //
        var baseLanguagePluginId = InternalPlugin.LANGUAGE_EN_US.MetaData().Id;
        var baseLanguagePluginMetaData = AVAILABLE_PLUGINS.FirstOrDefault(p => p.Id == baseLanguagePluginId);
        if (baseLanguagePluginMetaData is null)
        {
            LOG.LogError($"Was not able to find the base language plugin: Id='{baseLanguagePluginId}'. Please check your installation.");
            return;
        }

        var startedBasePlugin = await Start(baseLanguagePluginMetaData, cancellationToken);
        if (startedBasePlugin is NoPlugin noPlugin)
        {
            LOG.LogError($"Was not able to start the base language plugin: Id='{baseLanguagePluginId}'. Reason: {noPlugin.Issues.First()}");
            return;
        }

        if (startedBasePlugin is PluginLanguage languagePlugin)
        {
            BASE_LANGUAGE_PLUGIN = languagePlugin;
            RUNNING_PLUGINS.Add(languagePlugin);
            LOG.LogInformation($"Successfully started the base language plugin: Id='{languagePlugin.Id}', Type='{languagePlugin.Type}', Name='{languagePlugin.Name}', Version='{languagePlugin.Version}'");
        }
        else
        {
            LOG.LogError($"Was not able to start the base language plugin: Id='{baseLanguagePluginId}'. Reason: {string.Join("; ", startedBasePlugin.Issues)}");
            return;
        }

        //
        // Iterate over all available plugins and try to start them.
        //
        foreach (var availablePlugin in AVAILABLE_PLUGINS)
        {
            if(cancellationToken.IsCancellationRequested)
                break;

            // The base language plugin was already started above:
            if (availablePlugin.Id == baseLanguagePluginId)
                continue;

            // Internal plugins always run; external ones only when enabled in the settings:
            if (availablePlugin.IsInternal || SETTINGS_MANAGER.IsPluginEnabled(availablePlugin))
                if(await Start(availablePlugin, cancellationToken) is { IsValid: true } plugin)
                    RUNNING_PLUGINS.Add(plugin);

            // Inform all components that the plugins have been reloaded or started:
            // NOTE(review): this sends PLUGINS_RELOADED once per loop iteration; it looks
            // like it was meant to be sent once after the loop — confirm intent.
            await MessageBus.INSTANCE.SendMessage<bool>(null, Event.PLUGINS_RELOADED);
        }
    }

    /// <summary>
    /// Starts a single plugin by re-reading and re-loading its `plugin.lua` file.
    /// For language plugins, the base language plugin is wired up as fallback.
    /// </summary>
    /// <param name="meta">Metadata of the plugin to start, including its local path.</param>
    /// <param name="cancellationToken">Cancels reading and executing the plugin code.</param>
    /// <returns>The started plugin, or a NoPlugin describing why starting failed.</returns>
    private static async Task<PluginBase> Start(IAvailablePlugin meta, CancellationToken cancellationToken = default)
    {
        var pluginMainFile = Path.Join(meta.LocalPath, "plugin.lua");
        if(!File.Exists(pluginMainFile))
        {
            LOG.LogError($"Was not able to start plugin: Id='{meta.Id}', Type='{meta.Type}', Name='{meta.Name}', Version='{meta.Version}'. Reason: The plugin file does not exist.");
            return new NoPlugin($"The plugin file does not exist: {pluginMainFile}");
        }

        var code = await File.ReadAllTextAsync(pluginMainFile, Encoding.UTF8, cancellationToken);
        var plugin = await Load(meta.LocalPath, code, cancellationToken);
        if (plugin is NoPlugin noPlugin)
        {
            LOG.LogError($"Was not able to start plugin: Id='{meta.Id}', Type='{meta.Type}', Name='{meta.Name}', Version='{meta.Version}'. Reason: {noPlugin.Issues.First()}");
            return noPlugin;
        }

        if (plugin.IsValid)
        {
            //
            // When this is a language plugin, we need to set the base language plugin.
            //
            if (plugin is PluginLanguage languagePlugin && BASE_LANGUAGE_PLUGIN != NoPluginLanguage.INSTANCE)
                languagePlugin.SetBaseLanguage(BASE_LANGUAGE_PLUGIN);

            LOG.LogInformation($"Successfully started plugin: Id='{plugin.Id}', Type='{plugin.Type}', Name='{plugin.Name}', Version='{plugin.Version}'");
            return plugin;
        }

        LOG.LogError($"Was not able to start plugin: Id='{meta.Id}', Type='{meta.Type}', Name='{meta.Name}', Version='{meta.Version}'. Reasons: {string.Join("; ", plugin.Issues)}");
        return new NoPlugin($"Was not able to start plugin: Id='{meta.Id}', Type='{meta.Type}', Name='{meta.Name}', Version='{meta.Version}'. Reasons: {string.Join("; ", plugin.Issues)}");
    }
}

View File

@ -1,35 +1,18 @@
using System.Text;
using AIStudio.Settings;
using Lua;
using Lua.Standard;
namespace AIStudio.Tools.PluginSystem;
public static partial class PluginFactory
{
private static readonly ILogger LOG = Program.LOGGER_FACTORY.CreateLogger(nameof(PluginFactory));
private static readonly SettingsManager SETTINGS_MANAGER = Program.SERVICE_PROVIDER.GetRequiredService<SettingsManager>();
private static readonly List<IAvailablePlugin> AVAILABLE_PLUGINS = [];
private static readonly List<PluginBase> RUNNING_PLUGINS = [];
private static bool IS_INITIALIZED;
private static string DATA_DIR = string.Empty;
private static string PLUGINS_ROOT = string.Empty;
private static string INTERNAL_PLUGINS_ROOT = string.Empty;
private static FileSystemWatcher HOT_RELOAD_WATCHER = null!;
private static ILanguagePlugin BASE_LANGUAGE_PLUGIN = NoPluginLanguage.INSTANCE;
/// <summary>
/// A list of all available plugins.
/// </summary>
public static IReadOnlyCollection<IPluginMetadata> AvailablePlugins => AVAILABLE_PLUGINS;
/// <summary>
/// A list of all running plugins.
/// </summary>
public static IReadOnlyCollection<PluginBase> RunningPlugins => RUNNING_PLUGINS;
public static ILanguagePlugin BaseLanguage => BASE_LANGUAGE_PLUGIN;
@ -37,8 +20,11 @@ public static partial class PluginFactory
/// Set up the plugin factory. We will read the data directory from the settings manager.
/// Afterward, we will create the plugins directory and the internal plugin directory.
/// </summary>
public static void Setup()
public static bool Setup()
{
if(IS_INITIALIZED)
return false;
DATA_DIR = SettingsManager.DataDirectory!;
PLUGINS_ROOT = Path.Join(DATA_DIR, "plugins");
INTERNAL_PLUGINS_ROOT = Path.Join(PLUGINS_ROOT, ".internal");
@ -48,218 +34,8 @@ public static partial class PluginFactory
HOT_RELOAD_WATCHER = new(PLUGINS_ROOT);
IS_INITIALIZED = true;
}
/// <summary>
/// Try to load all plugins from the plugins directory.
/// </summary>
/// <remarks>
/// Loading plugins means:<br/>
/// - Parsing and checking the plugin code<br/>
/// - Check for forbidden plugins<br/>
/// - Creating a new instance of the allowed plugin<br/>
/// - Read the plugin metadata<br/>
/// <br/>
/// Loading a plugin does not mean to start the plugin, though.
/// </remarks>
public static async Task LoadAll(CancellationToken cancellationToken = default)
{
if (!IS_INITIALIZED)
{
LOG.LogError("PluginFactory is not initialized. Please call Setup() before using it.");
return;
}
LOG.LogInformation("Start loading plugins.");
if (!Directory.Exists(PLUGINS_ROOT))
{
LOG.LogInformation("No plugins found.");
return;
}
AVAILABLE_PLUGINS.Clear();
//
// The easiest way to load all plugins is to find all `plugin.lua` files and load them.
// By convention, each plugin is enforced to have a `plugin.lua` file.
//
var pluginMainFiles = Directory.EnumerateFiles(PLUGINS_ROOT, "plugin.lua", SearchOption.AllDirectories);
foreach (var pluginMainFile in pluginMainFiles)
{
if (cancellationToken.IsCancellationRequested)
break;
LOG.LogInformation($"Try to load plugin: {pluginMainFile}");
var code = await File.ReadAllTextAsync(pluginMainFile, Encoding.UTF8, cancellationToken);
var pluginPath = Path.GetDirectoryName(pluginMainFile)!;
var plugin = await Load(pluginPath, code, cancellationToken);
switch (plugin)
{
case NoPlugin noPlugin when noPlugin.Issues.Any():
LOG.LogError($"Was not able to load plugin: '{pluginMainFile}'. Reason: {noPlugin.Issues.First()}");
continue;
case NoPlugin:
LOG.LogError($"Was not able to load plugin: '{pluginMainFile}'. Reason: Unknown.");
continue;
case { IsValid: false }:
LOG.LogError($"Was not able to load plugin '{pluginMainFile}', because the Lua code is not a valid AI Studio plugin. There are {plugin.Issues.Count()} issues to fix.");
#if DEBUG
foreach (var pluginIssue in plugin.Issues)
LOG.LogError($"Plugin issue: {pluginIssue}");
#endif
continue;
case { IsMaintained: false }:
LOG.LogWarning($"The plugin '{pluginMainFile}' is not maintained anymore. Please consider to disable it.");
break;
}
LOG.LogInformation($"Successfully loaded plugin: '{pluginMainFile}' (Id='{plugin.Id}', Type='{plugin.Type}', Name='{plugin.Name}', Version='{plugin.Version}', Authors='{string.Join(", ", plugin.Authors)}')");
AVAILABLE_PLUGINS.Add(new PluginMetadata(plugin, pluginPath));
}
// Start or restart all plugins:
await RestartAllPlugins(cancellationToken);
}
private static async Task<PluginBase> Load(string pluginPath, string code, CancellationToken cancellationToken = default)
{
if(ForbiddenPlugins.Check(code) is { IsForbidden: true } forbiddenState)
return new NoPlugin($"This plugin is forbidden: {forbiddenState.Message}");
var state = LuaState.Create();
// Add the module loader so that the plugin can load other Lua modules:
state.ModuleLoader = new PluginLoader(pluginPath);
// Add some useful libraries:
state.OpenModuleLibrary();
state.OpenStringLibrary();
state.OpenTableLibrary();
state.OpenMathLibrary();
state.OpenBitwiseLibrary();
state.OpenCoroutineLibrary();
try
{
await state.DoStringAsync(code, cancellationToken: cancellationToken);
}
catch (LuaParseException e)
{
return new NoPlugin($"Was not able to parse the plugin: {e.Message}");
}
catch (LuaRuntimeException e)
{
return new NoPlugin($"Was not able to run the plugin: {e.Message}");
}
if (!state.Environment["TYPE"].TryRead<string>(out var typeText))
return new NoPlugin("TYPE does not exist or is not a valid string.");
if (!Enum.TryParse<PluginType>(typeText, out var type))
return new NoPlugin($"TYPE is not a valid plugin type. Valid types are: {CommonTools.GetAllEnumValues<PluginType>()}");
if(type is PluginType.NONE)
return new NoPlugin($"TYPE is not a valid plugin type. Valid types are: {CommonTools.GetAllEnumValues<PluginType>()}");
var isInternal = pluginPath.StartsWith(INTERNAL_PLUGINS_ROOT, StringComparison.OrdinalIgnoreCase);
return type switch
{
PluginType.LANGUAGE => new PluginLanguage(isInternal, state, type),
_ => new NoPlugin("This plugin type is not supported yet. Please try again with a future version of AI Studio.")
};
}
private static async Task RestartAllPlugins(CancellationToken cancellationToken = default)
{
LOG.LogInformation("Try to start or restart all plugins.");
RUNNING_PLUGINS.Clear();
//
// Get the base language plugin. This is the plugin that will be used to fill in missing keys.
//
var baseLanguagePluginId = InternalPlugin.LANGUAGE_EN_US.MetaData().Id;
var baseLanguagePluginMetaData = AVAILABLE_PLUGINS.FirstOrDefault(p => p.Id == baseLanguagePluginId);
if (baseLanguagePluginMetaData is null)
{
LOG.LogError($"Was not able to find the base language plugin: Id='{baseLanguagePluginId}'. Please check your installation.");
return;
}
var startedBasePlugin = await Start(baseLanguagePluginMetaData, cancellationToken);
if (startedBasePlugin is NoPlugin noPlugin)
{
LOG.LogError($"Was not able to start the base language plugin: Id='{baseLanguagePluginId}'. Reason: {noPlugin.Issues.First()}");
return;
}
if (startedBasePlugin is PluginLanguage languagePlugin)
{
BASE_LANGUAGE_PLUGIN = languagePlugin;
RUNNING_PLUGINS.Add(languagePlugin);
LOG.LogInformation($"Successfully started the base language plugin: Id='{languagePlugin.Id}', Type='{languagePlugin.Type}', Name='{languagePlugin.Name}', Version='{languagePlugin.Version}'");
}
else
{
LOG.LogError($"Was not able to start the base language plugin: Id='{baseLanguagePluginId}'. Reason: {string.Join("; ", startedBasePlugin.Issues)}");
return;
}
//
// Iterate over all available plugins and try to start them.
//
foreach (var availablePlugin in AVAILABLE_PLUGINS)
{
if(cancellationToken.IsCancellationRequested)
break;
if (availablePlugin.Id == baseLanguagePluginId)
continue;
if (availablePlugin.IsInternal || SETTINGS_MANAGER.IsPluginEnabled(availablePlugin))
if(await Start(availablePlugin, cancellationToken) is { IsValid: true } plugin)
RUNNING_PLUGINS.Add(plugin);
// Inform all components that the plugins have been reloaded or started:
await MessageBus.INSTANCE.SendMessage<bool>(null, Event.PLUGINS_RELOADED);
}
}
private static async Task<PluginBase> Start(IAvailablePlugin meta, CancellationToken cancellationToken = default)
{
var pluginMainFile = Path.Join(meta.LocalPath, "plugin.lua");
if(!File.Exists(pluginMainFile))
{
LOG.LogError($"Was not able to start plugin: Id='{meta.Id}', Type='{meta.Type}', Name='{meta.Name}', Version='{meta.Version}'. Reason: The plugin file does not exist.");
return new NoPlugin($"The plugin file does not exist: {pluginMainFile}");
}
var code = await File.ReadAllTextAsync(pluginMainFile, Encoding.UTF8, cancellationToken);
var plugin = await Load(meta.LocalPath, code, cancellationToken);
if (plugin is NoPlugin noPlugin)
{
LOG.LogError($"Was not able to start plugin: Id='{meta.Id}', Type='{meta.Type}', Name='{meta.Name}', Version='{meta.Version}'. Reason: {noPlugin.Issues.First()}");
return noPlugin;
}
if (plugin.IsValid)
{
//
// When this is a language plugin, we need to set the base language plugin.
//
if (plugin is PluginLanguage languagePlugin && BASE_LANGUAGE_PLUGIN != NoPluginLanguage.INSTANCE)
languagePlugin.SetBaseLanguage(BASE_LANGUAGE_PLUGIN);
LOG.LogInformation($"Successfully started plugin: Id='{plugin.Id}', Type='{plugin.Type}', Name='{plugin.Name}', Version='{plugin.Version}'");
return plugin;
}
LOG.LogError($"Was not able to start plugin: Id='{meta.Id}', Type='{meta.Type}', Name='{meta.Name}', Version='{meta.Version}'. Reasons: {string.Join("; ", plugin.Issues)}");
return new NoPlugin($"Was not able to start plugin: Id='{meta.Id}', Type='{meta.Type}', Name='{meta.Name}', Version='{meta.Version}'. Reasons: {string.Join("; ", plugin.Issues)}");
return true;
}
public static void Dispose()

View File

@ -94,13 +94,13 @@ public sealed class ProviderValidation
return null;
}
public string? ValidatingHFInstanceProvider(HFInstanceProvider instanceProvider)
public string? ValidatingHFInstanceProvider(HFInferenceProvider inferenceProvider)
{
if(this.GetProvider() is not LLMProviders.HUGGINGFACE)
return null;
if (instanceProvider is HFInstanceProvider.NONE)
return "Please select an Hugging Face instance provider.";
if (inferenceProvider is HFInferenceProvider.NONE)
return "Please select an Hugging Face inference provider.";
return null;
}

View File

@ -18,9 +18,9 @@
},
"HtmlAgilityPack": {
"type": "Direct",
"requested": "[1.12.0, )",
"resolved": "1.12.0",
"contentHash": "VHtVZmfoYhQyA/POvZRLuTpCz1zhzIDrdYRJIRV73e9wKAzjW71biYNOHOWx8MxEX3TE4TWVfx1QDRoZcj2AWw=="
"requested": "[1.12.1, )",
"resolved": "1.12.1",
"contentHash": "SP6/2Y26CXtxjXn0Wwsom9Ek35SNWKHEu/IWhNEFejBSSVWWXPRSlpqpBSYWv1SQhYFnwMO01xVbEdK3iRR4hg=="
},
"LuaCSharp": {
"type": "Direct",
@ -30,18 +30,18 @@
},
"Microsoft.Extensions.FileProviders.Embedded": {
"type": "Direct",
"requested": "[9.0.3, )",
"resolved": "9.0.3",
"contentHash": "UKfKGlZ7jKfe6v4rLsjnH/mGbD3e4YD9EK+Uobu+KIxwfhZuLLCtXm4CWTOf2s1t+ItmMs0QqbSJAXaMXCxLOw==",
"requested": "[9.0.4, )",
"resolved": "9.0.4",
"contentHash": "fictUnSF95D+M9iH4X6TYBjud2gbB2r6bcIi0sQknXFc2bHbNucoaK+SzfLCzb47tHSR9a5pm0F1Ioj0PgmFeQ==",
"dependencies": {
"Microsoft.Extensions.FileProviders.Abstractions": "9.0.3"
"Microsoft.Extensions.FileProviders.Abstractions": "9.0.4"
}
},
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[9.0.3, )",
"resolved": "9.0.3",
"contentHash": "1rqGTfubVg0qj2PsK6esyq3PIxtYJYrN3LsYUV9RrvH3anmt3fT3ozYdAZZH4U8JU/pt5pPIUk8NBSu26wtekA=="
"requested": "[9.0.4, )",
"resolved": "9.0.4",
"contentHash": "xUdlUxiFwXhTYhB4VxKg/IA0+jlZXJPo70LYuMryWbJHdonIpZjw+7DO2B0pWwpXIOs6MlH5WVXPEtfrGEcVZA=="
},
"MudBlazor": {
"type": "Direct",
@ -56,12 +56,13 @@
},
"MudBlazor.Markdown": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "0DcXQFEIgKJsaMCDva0Ck3gempoctyc7s8GLK5VagozlZdXql6W4SKX/imM/NfyfV7SxLrUTRJyLJX0Te+02sQ==",
"requested": "[8.5.1, )",
"resolved": "8.5.1",
"contentHash": "UrNr948Nn70CuDoTWaN/HgtaIKO4oKfj7W6Mw8Ei9OG7MCzCwkhUYJLe/CRTKCkt3wINquB/2AAN7ezgEfmbww==",
"dependencies": {
"Markdig": "0.40.0",
"MudBlazor": "8.0.0"
"Markdig": "0.41.0",
"Microsoft.Extensions.Caching.Memory": "9.0.4",
"MudBlazor": "8.5.1"
}
},
"ReverseMarkdown": {
@ -85,8 +86,8 @@
},
"Markdig": {
"type": "Transitive",
"resolved": "0.40.0",
"contentHash": "4ve14zs+gt1irldTQE3y5FLAHuzmhW7T99lAAvVipe/q2LWT/nUCO0iICb9TXGvMX6n7Z1OZroFXkdSy91rO8w=="
"resolved": "0.41.0",
"contentHash": "nEGSjfQ2i+MzJjvCZqoIBqW2x0iBALhhVogud48oPA/39a0n0jOhghdTYdm4xaDFBXmc4MxsVJAP5gtdvADvWQ=="
},
"Microsoft.AspNetCore.Authorization": {
"type": "Transitive",
@ -137,6 +138,26 @@
"resolved": "9.0.1",
"contentHash": "EZnHifamF7IFEIyjAKMtJM3I/94OIe72i3P09v5oL0twmsmfQwal6Ni3m8lbB5mge3jWFhMozeW+rUdRSqnXRQ=="
},
"Microsoft.Extensions.Caching.Abstractions": {
"type": "Transitive",
"resolved": "9.0.4",
"contentHash": "imcZ5BGhBw5mNsWLepBbqqumWaFe0GtvyCvne2/2wsDIBRa2+Lhx4cU/pKt/4BwOizzUEOls2k1eOJQXHGMalg==",
"dependencies": {
"Microsoft.Extensions.Primitives": "9.0.4"
}
},
"Microsoft.Extensions.Caching.Memory": {
"type": "Transitive",
"resolved": "9.0.4",
"contentHash": "G5rEq1Qez5VJDTEyRsRUnewAspKjaY57VGsdZ8g8Ja6sXXzoiI3PpTd1t43HjHqNWD5A06MQveb2lscn+2CU+w==",
"dependencies": {
"Microsoft.Extensions.Caching.Abstractions": "9.0.4",
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.4",
"Microsoft.Extensions.Logging.Abstractions": "9.0.4",
"Microsoft.Extensions.Options": "9.0.4",
"Microsoft.Extensions.Primitives": "9.0.4"
}
},
"Microsoft.Extensions.DependencyInjection": {
"type": "Transitive",
"resolved": "9.0.1",
@ -147,15 +168,15 @@
},
"Microsoft.Extensions.DependencyInjection.Abstractions": {
"type": "Transitive",
"resolved": "9.0.1",
"contentHash": "Tr74eP0oQ3AyC24ch17N8PuEkrPbD0JqIfENCYqmgKYNOmL8wQKzLJu3ObxTUDrjnn4rHoR1qKa37/eQyHmCDA=="
"resolved": "9.0.4",
"contentHash": "UI0TQPVkS78bFdjkTodmkH0Fe8lXv9LnhGFKgKrsgUJ5a5FVdFRcgjIkBVLbGgdRhxWirxH/8IXUtEyYJx6GQg=="
},
"Microsoft.Extensions.FileProviders.Abstractions": {
"type": "Transitive",
"resolved": "9.0.3",
"contentHash": "umczZ3+QPpzlrW/lkvy+IB0p52+qZ5w++aqx2lTCMOaPKzwcbVdrJgiQ3ajw5QWBp7gChLUiCYkSlWUpfjv24g==",
"resolved": "9.0.4",
"contentHash": "gQN2o/KnBfVk6Bd71E2YsvO5lsqrqHmaepDGk+FB/C4aiQY9B0XKKNKfl5/TqcNOs9OEithm4opiMHAErMFyEw==",
"dependencies": {
"Microsoft.Extensions.Primitives": "9.0.3"
"Microsoft.Extensions.Primitives": "9.0.4"
}
},
"Microsoft.Extensions.Localization": {
@ -176,25 +197,25 @@
},
"Microsoft.Extensions.Logging.Abstractions": {
"type": "Transitive",
"resolved": "9.0.1",
"contentHash": "w2gUqXN/jNIuvqYwX3lbXagsizVNXYyt6LlF57+tMve4JYCEgCMMAjRce6uKcDASJgpMbErRT1PfHy2OhbkqEA==",
"resolved": "9.0.4",
"contentHash": "0MXlimU4Dud6t+iNi5NEz3dO2w1HXdhoOLaYFuLPCjAsvlPQGwOT6V2KZRMLEhCAm/stSZt1AUv0XmDdkjvtbw==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.1"
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.4"
}
},
"Microsoft.Extensions.Options": {
"type": "Transitive",
"resolved": "9.0.1",
"contentHash": "nggoNKnWcsBIAaOWHA+53XZWrslC7aGeok+aR+epDPRy7HI7GwMnGZE8yEsL2Onw7kMOHVHwKcsDls1INkNUJQ==",
"resolved": "9.0.4",
"contentHash": "fiFI2+58kicqVZyt/6obqoFwHiab7LC4FkQ3mmiBJ28Yy4fAvy2+v9MRnSvvlOO8chTOjKsdafFl/K9veCPo5g==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.1",
"Microsoft.Extensions.Primitives": "9.0.1"
"Microsoft.Extensions.DependencyInjection.Abstractions": "9.0.4",
"Microsoft.Extensions.Primitives": "9.0.4"
}
},
"Microsoft.Extensions.Primitives": {
"type": "Transitive",
"resolved": "9.0.3",
"contentHash": "yCCJHvBcRyqapMSNzP+kTc57Eaavq2cr5Tmuil6/XVnipQf5xmskxakSQ1enU6S4+fNg3sJ27WcInV64q24JsA=="
"resolved": "9.0.4",
"contentHash": "SPFyMjyku1nqTFFJ928JAMd0QnRe4xjE7KeKnZMWXf3xk+6e0WiOZAluYtLdbJUXtsl2cCRSi8cBquJ408k8RA=="
},
"Microsoft.JSInterop": {
"type": "Transitive",

View File

@ -1,7 +1,11 @@
# v0.9.40, build 215 (2025-04-xx xx:xx UTC)
- Added support for the announced OpenAI `o4` models. We hope that these `o4` models will be usable by the well-known chat completion API instead of the new responses API, though. AI Studio cannot use the new responses API right now.
# v0.9.40, build 215 (2025-04-20 13:30 UTC)
- Added support for the announced OpenAI `o4` models.
- Added Alibaba Cloud as a new provider. Thanks Peer `peerschuett` for the contribution.
- Added the Hugging Face inference provider as an LLM provider to AI Studio. Thanks Peer `peerschuett` for the contribution.
- Added the current CPU architecture to the about page. This information helps us identify issues.
- Improved the LLM & embedding provider dialogs by hiding not relevant options.
- Improved the provider selection by showing the name of the provider in the provider selection instead of its identifier.
- Improved the developer experience by adding a tolerant enum converter for better configuration handling.
- Fixed an issue where OpenAI `o3` models were not shown in the model selection.
- Upgraded to .NET 9.0.4.
- Upgraded .NET & Rust dependencies.

View File

@ -0,0 +1,8 @@
# v0.9.41, build 216 (2025-0x-xx xx:xx UTC)
- Added the user-language, as provided by the OS, to the about page. This helps in identifying user-specific issues related to language settings.
- Changed the terminology from "temporary chats" to "disappearing chats" in the UI. This makes it clearer to understand the purpose of these chats.
- Improved the hot reloading of the plugin system to prevent overlapping reloads.
- Improved the app behavior when the user's system wakes up from sleep mode.
- Improved the provider dialog with better input handling for API keys and an optimized model selection.
- Improved the provider's model selection by filtering out non-text-output models, which are not supported yet.
- Fixed the color for the update notification button to match the color theme.

View File

@ -1,10 +1,16 @@
# Building
You just want to use the app? Then simply [download the appropriate setup for your operating system](Setup.md). This chapter is intended for developers who want to modify and customize the code.
## Prefaces regarding Linux development systems
Unfortunately, we have to provide a note regarding development on Linux systems. MindWork AI Studio consists of a Rust and a .NET part. Compiling the .NET code works smoothly on all operating systems. However, this is not the case for our Rust part. More specifically, it is not the Rust code itself that is problematic, but rather the Tauri framework on which we base our work. Tauri has certain dependencies that depend on the operating system. The specific dependencies vary between different Linux distributions and between versions of distributions.
Therefore, we cannot provide a static list here that is valid for all Linux systems. Unfortunately, the situation is even more complex: Tauri requires dependencies that are not available in current Linux distributions because they already include newer versions. **For these reasons, we currently advise against developing AI Studio on Linux.** In case you still want to try, you will need a lot of patience and willingness to experiment. We ask for your understanding. Thank you very much.
## Prerequisites
1. Install the [.NET 9 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/9.0).
2. [Install the Rust compiler](https://www.rust-lang.org/tools/install) in the latest version.
3. Meet the prerequisites for building [Tauri](https://tauri.app/v1/guides/getting-started/prerequisites/). Node.js is **not** required, though.
4. The core team uses [JetBrains](https://www.jetbrains.com/) [Rider](https://www.jetbrains.com/rider/) and [RustRover](https://www.jetbrains.com/rust/) for development. Both IDEs are free to use for open-source projects for non-commercial use. They are available for macOS, Linux, and Windows systems. Profiles are provided for these IDEs, so you can get started right away. However, you can also use a different IDE.
5. Clone the repository.
## One-time mandatory steps

View File

@ -14,7 +14,7 @@ AI Studio is only available for modern 64-bit Windows systems. When you have an
- **Intel/AMD:** In almost all other cases, you have an Intel/AMD system. [Download the x64 version](https://github.com/MindWorkAI/AI-Studio/releases/latest/download/MindWork.AI.Studio_x64-setup.exe) of AI Studio.
When you try to install the app, you get a message regarding protection of your PC (see screenshots below). For Windows to trust our app, we need to purchase a certificate that costs around $1000 per year. Would you like to help us with this? [Please consider supporting us](https://github.com/sponsors/MindWorkAI). You might want to [visit our release page](https://github.com/MindWorkAI/AI-Studio/releases/latest). There, we provide VirusTotal scan results for each release. If you are unsure about the safety of the app, you can check the results there. Ensure that the majority of scanners have a green checkmark.
When you try to install the app, you get a message regarding protection of your PC (see screenshots below). For Windows to trust our app, we need to purchase a certificate that [costs around $1000 per year](https://github.com/MindWorkAI/Planning/issues/56). Would you like to help us with this? [Please consider supporting us](https://github.com/sponsors/MindWorkAI). You might want to [visit our release page](https://github.com/MindWorkAI/AI-Studio/releases/latest). There, we provide VirusTotal scan results for each release. If you are unsure about the safety of the app, you can check the results there. Ensure that the majority of scanners have a green checkmark.
When you are confident in the app's safety, click on "More info" and then "Run anyway" to proceed with the installation:
@ -43,7 +43,7 @@ When you try to open the app, you get a message that the app is damaged:
![macOS Installation 2](macOS%20Damage.png)
This is because we don't have an Apple Developer account, which costs around $100 per year. Would you like to help us with this? [Please consider supporting us](https://github.com/sponsors/MindWorkAI). You might want to [visit our release page](https://github.com/MindWorkAI/AI-Studio/releases/latest). There, we provide VirusTotal scan results for each release. If you are unsure about the safety of the app, you can check the results there. Ensure that the majority of scanners have a green checkmark.
This is because we don't have an Apple Developer account, [which costs around $100 per year](https://github.com/MindWorkAI/Planning/issues/56). Would you like to help us with this? [Please consider supporting us](https://github.com/sponsors/MindWorkAI). You might want to [visit our release page](https://github.com/MindWorkAI/AI-Studio/releases/latest). There, we provide VirusTotal scan results for each release. If you are unsure about the safety of the app, you can check the results there. Ensure that the majority of scanners have a green checkmark.
When you are confident in the app's safety, follow these steps:

View File

@ -1,10 +1,10 @@
0.9.39
2025-04-07 17:39:09 UTC
214
9.0.104 (commit 2750432faa)
9.0.3 (commit 831d23e561)
0.9.40
2025-04-20 13:30:03 UTC
215
9.0.105 (commit 35890ecb87)
9.0.4 (commit f57e6dc747)
1.86.0 (commit 05f9846f8)
8.5.1
1.8.1
19935769035, release
2144cfe0590, release
osx-arm64

18
runtime/Cargo.lock generated
View File

@ -711,9 +711,9 @@ dependencies = [
[[package]]
name = "crossbeam-channel"
version = "0.5.13"
version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2"
checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
dependencies = [
"crossbeam-utils",
]
@ -2610,7 +2610,7 @@ checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
[[package]]
name = "mindwork-ai-studio"
version = "0.9.39"
version = "0.9.40"
dependencies = [
"aes",
"arboard",
@ -2619,6 +2619,7 @@ dependencies = [
"calamine",
"cbc",
"cipher",
"crossbeam-channel",
"file-format",
"flexi_logger",
"futures",
@ -2629,7 +2630,7 @@ dependencies = [
"openssl",
"pbkdf2",
"pdfium-render",
"rand 0.9.0",
"rand 0.9.1",
"rand_chacha 0.9.0",
"rcgen",
"reqwest 0.12.15",
@ -3199,9 +3200,9 @@ dependencies = [
[[package]]
name = "pdfium-render"
version = "0.8.29"
version = "0.8.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5cbb29c282cfbd0a8142ccd3bb0ce8da53e59141ce02a023b980bc72b6c0eec"
checksum = "2773a939ec2c736640f5f2e62a325c2e1a997d694961c50f17cadfb4c8682e84"
dependencies = [
"bitflags 2.6.0",
"bytemuck",
@ -3615,13 +3616,12 @@ dependencies = [
[[package]]
name = "rand"
version = "0.9.0"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
dependencies = [
"rand_chacha 0.9.0",
"rand_core 0.9.0",
"zerocopy",
]
[[package]]

View File

@ -1,6 +1,6 @@
[package]
name = "mindwork-ai-studio"
version = "0.9.39"
version = "0.9.40"
edition = "2021"
description = "MindWork AI Studio"
authors = ["Thorsten Sommer"]
@ -23,7 +23,7 @@ flexi_logger = "0.30.1"
log = { version = "0.4.27", features = ["kv"] }
once_cell = "1.21.3"
rocket = { version = "0.5.1", features = ["json", "tls"] }
rand = "0.9"
rand = "0.9.1"
rand_chacha = "0.9"
base64 = "0.22.1"
cipher = { version = "0.4.4", features = ["std"] }
@ -35,12 +35,13 @@ sha2 = "0.10.8"
rcgen = { version = "0.13.2", features = ["pem"] }
file-format = "0.26.0"
calamine = "0.26.1"
pdfium-render = "0.8.29"
pdfium-render = "0.8.30"
sys-locale = "0.3.2"
# Fixes security vulnerability downstream, where the upstream is not fixed yet:
url = "2.5"
ring = "0.17.14"
crossbeam-channel = "0.5.15"
[target.'cfg(target_os = "linux")'.dependencies]
# See issue https://github.com/tauri-apps/tauri/issues/4470

View File

@ -6,7 +6,7 @@
},
"package": {
"productName": "MindWork AI Studio",
"version": "0.9.39"
"version": "0.9.40"
},
"tauri": {
"allowlist": {