Compare commits

..

No commits in common. "main" and "v26.5.1" have entirely different histories.

51 changed files with 3172 additions and 3963 deletions

View File

@@ -12,10 +12,6 @@ on:
- synchronize
- reopened
concurrency:
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && (github.event.action != 'labeled' || github.event.label.name == 'run-pipeline') && github.event.pull_request.number || github.run_id }}
cancel-in-progress: ${{ github.event_name == 'pull_request' && (github.event.action != 'labeled' || github.event.label.name == 'run-pipeline') }}
env:
RETENTION_INTERMEDIATE_ASSETS: 1
RETENTION_RELEASE_ASSETS: 30
@@ -41,8 +37,6 @@ jobs:
id: determine
env:
EVENT_NAME: ${{ github.event_name }}
PR_ACTION: ${{ github.event.action }}
ACTION_LABEL_NAME: ${{ github.event.label.name }}
REF: ${{ github.ref }}
PR_LABELS: ${{ join(github.event.pull_request.labels.*.name, ' ') }}
PR_HEAD_REPO: ${{ github.event.pull_request.head.repo.full_name }}
@@ -61,11 +55,6 @@ jobs:
is_internal_pr=true
fi
has_run_pipeline_label=false
if [[ " $PR_LABELS " == *" run-pipeline "* ]]; then
has_run_pipeline_label=true
fi
if [[ "$REF" == refs/tags/v* ]]; then
is_release=true
build_enabled=true
@@ -76,21 +65,13 @@ jobs:
build_enabled=true
artifact_retention_days=7
skip_reason=""
elif [[ "$EVENT_NAME" == "pull_request" && "$PR_ACTION" == "labeled" && "$ACTION_LABEL_NAME" == "run-pipeline" ]]; then
elif [[ "$EVENT_NAME" == "pull_request" && " $PR_LABELS " == *" run-pipeline "* ]]; then
is_labeled_pr=true
is_pr_build=true
build_enabled=true
artifact_retention_days=3
skip_reason=""
elif [[ "$EVENT_NAME" == "pull_request" && "$PR_ACTION" != "labeled" && "$has_run_pipeline_label" == "true" ]]; then
is_labeled_pr=true
is_pr_build=true
build_enabled=true
artifact_retention_days=3
skip_reason=""
elif [[ "$EVENT_NAME" == "pull_request" && "$PR_ACTION" == "labeled" ]]; then
skip_reason="Build disabled: label '${ACTION_LABEL_NAME}' is not 'run-pipeline'."
elif [[ "$EVENT_NAME" == "pull_request" && "$has_run_pipeline_label" != "true" ]]; then
elif [[ "$EVENT_NAME" == "pull_request" && " $PR_LABELS " != *" run-pipeline "* ]]; then
skip_reason="Build disabled: PR does not have the required 'run-pipeline' label."
fi
@@ -239,29 +220,29 @@ jobs:
rust_target: 'aarch64-apple-darwin'
dotnet_runtime: 'osx-arm64'
dotnet_name_postfix: '-aarch64-apple-darwin'
tauri_bundle: 'dmg,app,updater'
tauri_bundle: 'dmg,updater'
tauri_bundle_pr: 'dmg'
- platform: 'macos-latest' # for Intel-based macOS
rust_target: 'x86_64-apple-darwin'
dotnet_runtime: 'osx-x64'
dotnet_name_postfix: '-x86_64-apple-darwin'
tauri_bundle: 'dmg,app,updater'
tauri_bundle: 'dmg,updater'
tauri_bundle_pr: 'dmg'
- platform: 'ubuntu-22.04' # for x86-based Linux
rust_target: 'x86_64-unknown-linux-gnu'
dotnet_runtime: 'linux-x64'
dotnet_name_postfix: '-x86_64-unknown-linux-gnu'
tauri_bundle: 'appimage,updater'
tauri_bundle_pr: 'appimage'
tauri_bundle: 'appimage,deb,updater'
tauri_bundle_pr: 'appimage,deb'
- platform: 'ubuntu-22.04-arm' # for ARM-based Linux
rust_target: 'aarch64-unknown-linux-gnu'
dotnet_runtime: 'linux-arm64'
dotnet_name_postfix: '-aarch64-unknown-linux-gnu'
tauri_bundle: 'appimage,updater'
tauri_bundle_pr: 'appimage'
tauri_bundle: 'appimage,deb,updater'
tauri_bundle_pr: 'appimage,deb'
- platform: 'windows-latest' # for x86-based Windows
rust_target: 'x86_64-pc-windows-msvc'
@@ -704,9 +685,11 @@ jobs:
uses: actions/cache@v4
with:
path: |
~/.cargo/bin
~/.cargo/git/db/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.rustup/toolchains
runtime/target
key: target-${{ matrix.dotnet_runtime }}-rust-${{ env.RUST_VERSION }}
@@ -716,64 +699,42 @@ jobs:
with:
toolchain: ${{ env.RUST_VERSION }}
targets: ${{ matrix.rust_target }}
- name: Cache Tauri CLI
uses: actions/cache@v4
with:
path: ~/.cargo-tauri-cli
key: tauri-cli-v2-${{ runner.os }}-${{ runner.arch }}
- name: Setup dependencies (Ubuntu-specific, x86)
if: matrix.platform == 'ubuntu-22.04' && contains(matrix.rust_target, 'x86_64')
run: |
sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf libfuse2 xdg-utils
sudo apt-get install -y libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf libfuse2
- name: Setup dependencies (Ubuntu-specific, ARM)
if: matrix.platform == 'ubuntu-22.04-arm' && contains(matrix.rust_target, 'aarch64')
run: |
sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf libfuse2 xdg-utils
sudo apt-get install -y libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf libfuse2
- name: Setup Tauri (Unix)
if: matrix.platform != 'windows-latest'
run: |
echo "$HOME/.cargo-tauri-cli/bin" >> "$GITHUB_PATH"
export PATH="$HOME/.cargo-tauri-cli/bin:$PATH"
if ! cargo tauri --version 2>/dev/null | grep -Eq '^tauri-cli 2\.'; then
cargo install tauri-cli --version "^2.11.0" --locked --force --root "$HOME/.cargo-tauri-cli"
if ! cargo tauri --version > /dev/null 2>&1; then
cargo install --version 1.6.2 tauri-cli
else
echo "Tauri CLI v2 is already installed"
echo "Tauri is already installed"
fi
- name: Setup Tauri (Windows)
if: matrix.platform == 'windows-latest'
run: |
"$env:USERPROFILE\.cargo-tauri-cli\bin" >> $env:GITHUB_PATH
$env:PATH = "$env:USERPROFILE\.cargo-tauri-cli\bin;$env:PATH"
$tauriVersion = cargo tauri --version 2>$null
if (-not $tauriVersion -or $tauriVersion -notmatch '^tauri-cli 2\.') {
cargo install tauri-cli --version "^2.11.0" --locked --force --root "$env:USERPROFILE\.cargo-tauri-cli"
if (-not (cargo tauri --version 2>$null)) {
cargo install --version 1.6.2 tauri-cli
} else {
Write-Output "Tauri CLI v2 is already installed"
Write-Output "Tauri is already installed"
}
- name: Delete previous artifact, which may exist due to caching (macOS)
if: startsWith(matrix.platform, 'macos')
run: |
dmg_dir="runtime/target/${{ matrix.rust_target }}/release/bundle/dmg"
macos_dir="runtime/target/${{ matrix.rust_target }}/release/bundle/macos"
if [ -d "$dmg_dir" ]; then
find "$dmg_dir" -maxdepth 1 -name 'MindWork AI Studio_*.dmg' -delete
fi
if [ -d "$macos_dir" ]; then
find "$macos_dir" -maxdepth 1 -name '*.app' -exec rm -rf {} +
find "$macos_dir" -maxdepth 1 -name '*.app.tar.gz*' -delete
fi
rm -f runtime/target/${{ matrix.rust_target }}/release/bundle/dmg/MindWork AI Studio_*.dmg
rm -f runtime/target/${{ matrix.rust_target }}/release/bundle/macos/MindWork AI Studio.app.tar.gz*
- name: Delete previous artifact, which may exist due to caching (Windows - MSI)
if: startsWith(matrix.platform, 'windows') && contains(matrix.tauri_bundle, 'msi')
@@ -787,11 +748,16 @@ jobs:
rm -Force "runtime/target/${{ matrix.rust_target }}/release/bundle/nsis/MindWork AI Studio_*.exe" -ErrorAction SilentlyContinue
rm -Force "runtime/target/${{ matrix.rust_target }}/release/bundle/nsis/MindWork AI Studio*nsis.zip*" -ErrorAction SilentlyContinue
- name: Delete previous artifact, which may exist due to caching (Linux - Debian Package)
if: startsWith(matrix.platform, 'ubuntu') && contains(matrix.tauri_bundle, 'deb')
run: |
rm -f runtime/target/${{ matrix.rust_target }}/release/bundle/deb/mind-work-ai-studio_*.deb
- name: Delete previous artifact, which may exist due to caching (Linux - AppImage)
if: startsWith(matrix.platform, 'ubuntu') && contains(matrix.tauri_bundle, 'appimage')
run: |
rm -f runtime/target/${{ matrix.rust_target }}/release/bundle/appimage/*.AppImage
rm -f runtime/target/${{ matrix.rust_target }}/release/bundle/appimage/*.AppImage.tar.gz*
rm -f runtime/target/${{ matrix.rust_target }}/release/bundle/appimage/mind-work-ai-studio_*.AppImage
rm -f runtime/target/${{ matrix.rust_target }}/release/bundle/appimage/mind-work-ai-studio*AppImage.tar.gz*
- name: Build Tauri project (Unix)
if: matrix.platform != 'windows-latest'
@@ -800,39 +766,17 @@ jobs:
PRIVATE_PUBLISH_KEY_PASSWORD: ${{ secrets.PRIVATE_PUBLISH_KEY_PASSWORD }}
run: |
bundles="${{ matrix.tauri_bundle }}"
tauri_config_args=()
if [ "${{ needs.determine_run_mode.outputs.is_pr_build }}" = "true" ]; then
echo "Running PR test build without updater bundle signing"
bundles="${{ matrix.tauri_bundle_pr }}"
tauri_config_args=(--config '{"bundle":{"createUpdaterArtifacts":false}}')
else
export TAURI_SIGNING_PRIVATE_KEY="$PRIVATE_PUBLISH_KEY"
export TAURI_SIGNING_PRIVATE_KEY_PASSWORD="$PRIVATE_PUBLISH_KEY_PASSWORD"
export TAURI_PRIVATE_KEY="$PRIVATE_PUBLISH_KEY"
export TAURI_KEY_PASSWORD="$PRIVATE_PUBLISH_KEY_PASSWORD"
fi
cd runtime
cargo tauri build --target ${{ matrix.rust_target }} --bundles "$bundles" "${tauri_config_args[@]}"
if [ "${{ needs.determine_run_mode.outputs.is_pr_build }}" = "true" ]; then
updater_artifact_count=$(find target/${{ matrix.rust_target }}/release/bundle -type f \( -name '*.app.tar.gz*' -o -name '*.AppImage.tar.gz*' -o -name '*nsis.zip*' \) | wc -l)
if [ "$updater_artifact_count" -ne 0 ]; then
echo "PR builds must not generate updater artifacts."
find target/${{ matrix.rust_target }}/release/bundle -type f \( -name '*.app.tar.gz*' -o -name '*.AppImage.tar.gz*' -o -name '*nsis.zip*' \)
exit 1
fi
fi
if [ "${{ needs.determine_run_mode.outputs.is_pr_build }}" != "true" ] && [[ "${{ matrix.platform }}" == macos* ]]; then
app_update_archive_count=$(find target/${{ matrix.rust_target }}/release/bundle/macos -maxdepth 1 -name '*.app.tar.gz' | wc -l)
app_update_signature_count=$(find target/${{ matrix.rust_target }}/release/bundle/macos -maxdepth 1 -name '*.app.tar.gz.sig' | wc -l)
if [ "$app_update_archive_count" -eq 0 ] || [ "$app_update_signature_count" -eq 0 ]; then
echo "Expected macOS updater artifacts were not generated."
exit 1
fi
fi
cargo tauri build --target ${{ matrix.rust_target }} --bundles "$bundles"
- name: Build Tauri project (Windows)
if: matrix.platform == 'windows-latest'
@@ -841,29 +785,17 @@ jobs:
PRIVATE_PUBLISH_KEY_PASSWORD: ${{ secrets.PRIVATE_PUBLISH_KEY_PASSWORD }}
run: |
$bundles = "${{ matrix.tauri_bundle }}"
$tauriConfigArgs = @()
if ("${{ needs.determine_run_mode.outputs.is_pr_build }}" -eq "true") {
Write-Output "Running PR test build without updater bundle signing"
$bundles = "${{ matrix.tauri_bundle_pr }}"
$tauriConfigArgs = @("--config", '{"bundle":{"createUpdaterArtifacts":false}}')
} else {
$env:TAURI_SIGNING_PRIVATE_KEY="$env:PRIVATE_PUBLISH_KEY"
$env:TAURI_SIGNING_PRIVATE_KEY_PASSWORD="$env:PRIVATE_PUBLISH_KEY_PASSWORD"
$env:TAURI_PRIVATE_KEY="$env:PRIVATE_PUBLISH_KEY"
$env:TAURI_KEY_PASSWORD="$env:PRIVATE_PUBLISH_KEY_PASSWORD"
}
cd runtime
cargo tauri build --target ${{ matrix.rust_target }} --bundles $bundles @tauriConfigArgs
if ("${{ needs.determine_run_mode.outputs.is_pr_build }}" -eq "true") {
$updaterArtifacts = Get-ChildItem -Path "target/${{ matrix.rust_target }}/release/bundle" -Recurse -File -Include "*.app.tar.gz*", "*.AppImage.tar.gz*", "*nsis.zip*" -ErrorAction SilentlyContinue
if ($updaterArtifacts.Count -ne 0) {
Write-Error "PR builds must not generate updater artifacts."
$updaterArtifacts | ForEach-Object { Write-Error $_.FullName }
exit 1
}
}
cargo tauri build --target ${{ matrix.rust_target }} --bundles $bundles
- name: Upload artifact (macOS)
if: startsWith(matrix.platform, 'macos')
@@ -872,7 +804,7 @@ jobs:
name: MindWork AI Studio (macOS ${{ matrix.dotnet_runtime }})
path: |
runtime/target/${{ matrix.rust_target }}/release/bundle/dmg/MindWork AI Studio_*.dmg
runtime/target/${{ matrix.rust_target }}/release/bundle/macos/*.app.tar.gz*
runtime/target/${{ matrix.rust_target }}/release/bundle/macos/MindWork AI Studio.app.tar.gz*
if-no-files-found: error
retention-days: ${{ fromJSON(needs.determine_run_mode.outputs.artifact_retention_days) }}
@@ -898,14 +830,24 @@ jobs:
if-no-files-found: error
retention-days: ${{ fromJSON(needs.determine_run_mode.outputs.artifact_retention_days) }}
- name: Upload artifact (Linux - Debian Package)
if: startsWith(matrix.platform, 'ubuntu') && contains(matrix.tauri_bundle, 'deb')
uses: actions/upload-artifact@v4
with:
name: MindWork AI Studio (Linux - deb ${{ matrix.dotnet_runtime }})
path: |
runtime/target/${{ matrix.rust_target }}/release/bundle/deb/mind-work-ai-studio_*.deb
if-no-files-found: error
retention-days: ${{ fromJSON(needs.determine_run_mode.outputs.artifact_retention_days) }}
- name: Upload artifact (Linux - AppImage)
if: startsWith(matrix.platform, 'ubuntu') && contains(matrix.tauri_bundle, 'appimage')
uses: actions/upload-artifact@v4
with:
name: MindWork AI Studio (Linux - AppImage ${{ matrix.dotnet_runtime }})
path: |
runtime/target/${{ matrix.rust_target }}/release/bundle/appimage/*.AppImage
runtime/target/${{ matrix.rust_target }}/release/bundle/appimage/*.AppImage.tar.gz*
runtime/target/${{ matrix.rust_target }}/release/bundle/appimage/mind-work-ai-studio_*.AppImage
runtime/target/${{ matrix.rust_target }}/release/bundle/appimage/mind-work-ai-studio*AppImage.tar.gz*
if-no-files-found: error
retention-days: ${{ fromJSON(needs.determine_run_mode.outputs.artifact_retention_days) }}
@@ -941,14 +883,14 @@ jobs:
# Find and process files in the artifacts directory:
find "$GITHUB_WORKSPACE/artifacts" -type f | while read -r FILE; do
if [[ "$FILE" == *"osx-x64"* && "$FILE" == *".tar.gz.sig" ]]; then
TARGET_NAME="MindWork AI Studio_x64.app.tar.gz.sig"
elif [[ "$FILE" == *"osx-x64"* && "$FILE" == *".tar.gz" ]]; then
if [[ "$FILE" == *"osx-x64"* && "$FILE" == *".tar.gz" ]]; then
TARGET_NAME="MindWork AI Studio_x64.app.tar.gz"
elif [[ "$FILE" == *"osx-arm64"* && "$FILE" == *".tar.gz.sig" ]]; then
TARGET_NAME="MindWork AI Studio_aarch64.app.tar.gz.sig"
elif [[ "$FILE" == *"osx-x64"* && "$FILE" == *".tar.gz.sig" ]]; then
TARGET_NAME="MindWork AI Studio_x64.app.tar.gz.sig"
elif [[ "$FILE" == *"osx-arm64"* && "$FILE" == *".tar.gz" ]]; then
TARGET_NAME="MindWork AI Studio_aarch64.app.tar.gz"
elif [[ "$FILE" == *"osx-arm64"* && "$FILE" == *".tar.gz.sig" ]]; then
TARGET_NAME="MindWork AI Studio_aarch64.app.tar.gz.sig"
else
TARGET_NAME="$(basename "$FILE")"
TARGET_NAME=$(echo "$TARGET_NAME" | sed "s/_${VERSION}//")
@@ -999,9 +941,9 @@ jobs:
platform="linux-x86_64"
elif [[ "$sig_file" == *"aarch64.AppImage"* ]]; then
platform="linux-aarch64"
elif [[ "$sig_file" == *"x64-setup"* ]]; then
elif [[ "$sig_file" == *"x64-setup.nsis"* ]]; then
platform="windows-x86_64"
elif [[ "$sig_file" == *"arm64-setup"* ]]; then
elif [[ "$sig_file" == *"arm64-setup.nsis"* ]]; then
platform="windows-aarch64"
else
echo "Platform not recognized: '$sig_file'"
@@ -1065,13 +1007,6 @@ jobs:
exit 1
fi
for platform in darwin-aarch64 darwin-x86_64 linux-aarch64 linux-x86_64 windows-aarch64 windows-x86_64; do
if ! jq -e --arg platform "$platform" '.platforms[$platform]' $GITHUB_WORKSPACE/release/assets/latest.json > /dev/null; then
echo "The generated latest.json is missing platform '$platform'."
exit 1
fi
done
- name: Show all release assets
run: ls -Rlhat $GITHUB_WORKSPACE/release/assets

3
.gitignore vendored
View File

@@ -169,6 +169,3 @@ orleans.codegen.cs
# Ignore GitHub Copilot migration files:
**/copilot.data.migration.*.xml
# Tauri generated schemas/manifests
/runtime/gen/

View File

@@ -28,11 +28,12 @@ Since November 2024: Work on RAG (integration of your data and files) has begun.
- [x] ~~App: Implement an [ERI](https://github.com/MindWorkAI/ERI) server coding assistant (PR [#231](https://github.com/MindWorkAI/AI-Studio/pull/231))~~
- [x] ~~App: Management of data sources (local & external data via [ERI](https://github.com/MindWorkAI/ERI)) (PR [#259](https://github.com/MindWorkAI/AI-Studio/pull/259), [#273](https://github.com/MindWorkAI/AI-Studio/pull/273))~~
- [x] ~~Runtime: Extract data from txt / md / pdf / docx / xlsx files (PR [#374](https://github.com/MindWorkAI/AI-Studio/pull/374))~~
- [ ] (*Optional*) Runtime: Implement internal embedding provider through [fastembed-rs](https://github.com/Anush008/fastembed-rs)
- [x] ~~App: Implement dialog for checking & handling [pandoc](https://pandoc.org/) installation ([PR #393](https://github.com/MindWorkAI/AI-Studio/pull/393), [PR #487](https://github.com/MindWorkAI/AI-Studio/pull/487))~~
- [x] ~~App: Implement external embedding providers ([PR #654](https://github.com/MindWorkAI/AI-Studio/pull/654))~~
- [ ] App: Implement the process to vectorize one local file using embeddings (PR [#756](https://github.com/MindWorkAI/AI-Studio/pull/756))
- [ ] App: Implement the process to vectorize one local file using embeddings
- [x] ~~Runtime: Integration of the vector database [Qdrant](https://github.com/qdrant/qdrant) ([PR #580](https://github.com/MindWorkAI/AI-Studio/pull/580))~~
- [ ] App: Implement the continuous process of vectorizing data (PR [#756](https://github.com/MindWorkAI/AI-Studio/pull/756))
- [ ] App: Implement the continuous process of vectorizing data
- [x] ~~App: Define a common retrieval context interface for the integration of RAG processes in chats (PR [#281](https://github.com/MindWorkAI/AI-Studio/pull/281), [#284](https://github.com/MindWorkAI/AI-Studio/pull/284), [#286](https://github.com/MindWorkAI/AI-Studio/pull/286), [#287](https://github.com/MindWorkAI/AI-Studio/pull/287))~~
- [x] ~~App: Define a common augmentation interface for the integration of RAG processes in chats (PR [#288](https://github.com/MindWorkAI/AI-Studio/pull/288), [#289](https://github.com/MindWorkAI/AI-Studio/pull/289))~~
- [x] ~~App: Integrate data sources in chats (PR [#282](https://github.com/MindWorkAI/AI-Studio/pull/282))~~

View File

@@ -245,7 +245,7 @@ public sealed partial class UpdateMetadataCommands
Console.WriteLine("- Start building the Rust runtime ...");
var pathRuntime = Environment.GetRustRuntimeDirectory();
var rustBuildOutput = await this.ReadCommandOutput(pathRuntime, "cargo", "tauri build --no-bundle", true);
var rustBuildOutput = await this.ReadCommandOutput(pathRuntime, "cargo", "tauri build --bundles none", true);
var rustBuildOutputLines = rustBuildOutput.Split([global::System.Environment.NewLine], StringSplitOptions.RemoveEmptyEntries);
var foundRustIssue = false;
foreach (var buildOutputLine in rustBuildOutputLines)

View File

@@ -6019,12 +6019,18 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1890416390"] = "Check for update
-- Vision
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1892426825"] = "Vision"
-- In order to use any LLM, each user must store their so-called API key for each LLM provider. This key must be kept secure, similar to a password. The safest way to do this is offered by operating systems like macOS, Windows, and Linux: They have mechanisms to store such data, if available, on special security hardware. Since this is currently not possible in .NET, we use this Rust library.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1915240766"] = "In order to use any LLM, each user must store their so-called API key for each LLM provider. This key must be kept secure, similar to a password. The safest way to do this is offered by operating systems like macOS, Windows, and Linux: They have mechanisms to store such data, if available, on special security hardware. Since this is currently not possible in .NET, we use this Rust library."
-- This library is used to convert HTML to Markdown. This is necessary, e.g., when you provide a URL as input for an assistant.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1924365263"] = "This library is used to convert HTML to Markdown. This is necessary, e.g., when you provide a URL as input for an assistant."
-- Encryption secret: is configured
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1931141322"] = "Encryption secret: is configured"
-- We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1943216839"] = "We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust."
-- Copies the following to the clipboard
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2029659664"] = "Copies the following to the clipboard"
@@ -6127,12 +6133,6 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3178730036"] = "Have feature ide
-- Hide Details
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3183837919"] = "Hide Details"
-- Axum server runs the internal axum service over a secure local connection. This helps AI Studio protect the communication between the Rust runtime and the user interface.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3208719461"] = "Axum server runs the internal axum service over a secure local connection. This helps AI Studio protect the communication between the Rust runtime and the user interface."
-- Rustls helps secure the internal connection between the app's user interface and the Rust runtime. This protects the local communication that AI Studio needs while it is running.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3239817808"] = "Rustls helps secure the internal connection between the app's user interface and the Rust runtime. This protects the local communication that AI Studio needs while it is running."
-- Update Pandoc
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3249965383"] = "Update Pandoc"
@@ -6157,9 +6157,6 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3449345633"] = "AI Studio runs w
-- Tauri is used to host the Blazor user interface. It is a great project that allows the creation of desktop applications using web technologies. I love Tauri!
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3494984593"] = "Tauri is used to host the Blazor user interface. It is a great project that allows the creation of desktop applications using web technologies. I love Tauri!"
-- AI Studio stores secrets like API keys in your operating systems secure credential store. The keyring-core library handles this by connecting to macOS Keychain, Windows Credential Manager, and Linux Secret Service.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3527399572"] = "AI Studio stores secrets like API keys in your operating systems secure credential store. The keyring-core library handles this by connecting to macOS Keychain, Windows Credential Manager, and Linux Secret Service."
-- Motivation
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3563271893"] = "Motivation"
@@ -6229,9 +6226,6 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T836298648"] = "Provided by confi
-- We use this library to be able to read PowerPoint files. This allows us to insert content from slides into prompts and take PowerPoint files into account in RAG processes. We thank Nils Kruthoff for his work on this Rust crate.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T855925638"] = "We use this library to be able to read PowerPoint files. This allows us to insert content from slides into prompts and take PowerPoint files into account in RAG processes. We thank Nils Kruthoff for his work on this Rust crate."
-- Axum is used to provide the small internal service that connects the Rust runtime with the app's user interface. This lets both parts of AI Studio exchange information while the app is running.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T864851737"] = "Axum is used to provide the small internal service that connects the Rust runtime with the app's user interface. This lets both parts of AI Studio exchange information while the app is running."
-- For some data transfers, we need to encode the data in base64. This Rust library is great for this purpose.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T870640199"] = "For some data transfers, we need to encode the data in base64. This Rust library is great for this purpose."
@@ -6676,8 +6670,8 @@ UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T2708
-- Unknown preview feature
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T2722827307"] = "Unknown preview feature"
-- Transcription: Convert recordings and audio files into text
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T4247148645"] = "Transcription: Convert recordings and audio files into text"
-- Transcription: Preview of our speech to text system where you can transcribe recordings and audio files into text
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T714355911"] = "Transcription: Preview of our speech to text system where you can transcribe recordings and audio files into text"
-- Use no data sources, when sending an assistant result to a chat
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::SENDTOCHATDATASOURCEBEHAVIOREXTENSIONS::T1223925477"] = "Use no data sources, when sending an assistant result to a chat"
@@ -6973,12 +6967,6 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::ERICLIENT::ERICLIENTV1::T816853779"] = "Failed
-- Failed to retrieve the authentication methods: the ERI server did not return a valid response.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::ERICLIENT::ERICLIENTV1::T984407320"] = "Failed to retrieve the authentication methods: the ERI server did not return a valid response."
-- AI Studio couldn't install Pandoc because the archive was not found.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T1059477764"] = "AI Studio couldn't install Pandoc because the archive was not found."
-- Pandoc doesn't seem to be installed.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T1090474732"] = "Pandoc doesn't seem to be installed."
-- Was not able to validate the Pandoc installation.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T1364844008"] = "Was not able to validate the Pandoc installation."
@@ -7000,20 +6988,20 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T2550598062"] = "Pandoc v{0} is instal
-- Pandoc v{0} is installed, but it does not match the required version (v{1}).
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T2555465873"] = "Pandoc v{0} is installed, but it does not match the required version (v{1})."
-- AI Studio couldn't install Pandoc because the archive type is unknown.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T3492710362"] = "AI Studio couldn't install Pandoc because the archive type is unknown."
-- Pandoc was not installed successfully, because the archive was not found.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T34210248"] = "Pandoc was not installed successfully, because the archive was not found."
-- Pandoc is not available on the system or the process had issues.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T3746116957"] = "Pandoc is not available on the system or the process had issues."
-- AI Studio couldn't install Pandoc because the executable was not found in the archive.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T403983772"] = "AI Studio couldn't install Pandoc because the executable was not found in the archive."
-- Pandoc was not installed successfully, because the archive type is unknown.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T3962211670"] = "Pandoc was not installed successfully, because the archive type is unknown."
-- AI Studio couldn't find the latest Pandoc version and will install version {0} instead.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T695293525"] = "AI Studio couldn't find the latest Pandoc version and will install version {0} instead."
-- It seems that Pandoc is not installed.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T567205144"] = "It seems that Pandoc is not installed."
-- AI Studio couldn't install Pandoc.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T932858631"] = "AI Studio couldn't install Pandoc."
-- The latest Pandoc version was not found, installing version {0} instead.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T726914939"] = "The latest Pandoc version was not found, installing version {0} instead."
-- Pandoc is required for Microsoft Word export.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T1473115556"] = "Pandoc is required for Microsoft Word export."

View File

@@ -13,9 +13,6 @@ public partial class Changelog
public static readonly Log[] LOGS =
[
new (239, "v26.5.4, build 239 (2026-05-13 11:58 UTC)", "v26.5.4.md"),
new (238, "v26.5.3, build 238 (2026-05-13 09:50 UTC)", "v26.5.3.md"),
new (237, "v26.5.2, build 237 (2026-05-06 16:38 UTC)", "v26.5.2.md"),
new (236, "v26.5.1, build 236 (2026-05-06 13:06 UTC)", "v26.5.1.md"),
new (235, "v26.4.1, build 235 (2026-04-17 17:25 UTC)", "v26.4.1.md"),
new (234, "v26.2.2, build 234 (2026-02-22 14:16 UTC)", "v26.2.2.md"),

View File

@@ -5,6 +5,7 @@
@if (PreviewFeatures.PRE_SPEECH_TO_TEXT_2026.IsEnabled(this.SettingsManager))
{
<ExpansionPanel HeaderIcon="@Icons.Material.Filled.VoiceChat" HeaderText="@T("Configure Transcription Providers")">
<PreviewBeta ApplyInnerScrollingFix="true"/>
<MudText Typo="Typo.h4" Class="mb-3">
@T("Configured Transcription Providers")
</MudText>

View File

@@ -50,12 +50,12 @@
<ItemGroup>
<PackageReference Include="CodeBeam.MudBlazor.Extensions" Version="8.3.0" />
<PackageReference Include="HtmlAgilityPack" Version="1.12.4" />
<PackageReference Include="Microsoft.Extensions.FileProviders.Embedded" Version="9.0.16" />
<PackageReference Include="Microsoft.Extensions.FileProviders.Embedded" Version="9.0.15" />
<PackageReference Include="MudBlazor" Version="8.15.0" />
<PackageReference Include="MudBlazor.Markdown" Version="8.11.0" />
<PackageReference Include="Qdrant.Client" Version="1.18.1" />
<PackageReference Include="Qdrant.Client" Version="1.17.0" />
<PackageReference Include="ReverseMarkdown" Version="5.0.0" />
<PackageReference Include="LuaCSharp" Version="0.5.5" />
<PackageReference Include="LuaCSharp" Version="0.5.3" />
</ItemGroup>
<ItemGroup>

View File

@ -279,12 +279,10 @@
<ThirdPartyComponent Name="Rust" Developer="Graydon Hoare, Rust Foundation, Rust developers & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/rust-lang/rust/blob/master/LICENSE-MIT" RepositoryUrl="https://github.com/rust-lang/rust" UseCase="@T("The .NET backend cannot be started as a desktop app. Therefore, I use a second backend in Rust, which I call runtime. With Rust as the runtime, Tauri can be used to realize a typical desktop app. Thanks to Rust, this app can be offered for Windows, macOS, and Linux desktops. Rust is a great language for developing safe and high-performance software.")"/>
<ThirdPartyComponent Name="Tauri" Developer="Daniel Thompson-Yvetot, Lucas Nogueira, Tensor, Boscop, Serge Zaitsev, George Burton & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/tauri-apps/tauri/blob/dev/LICENSE_MIT" RepositoryUrl="https://github.com/tauri-apps/tauri" UseCase="@T("Tauri is used to host the Blazor user interface. It is a great project that allows the creation of desktop applications using web technologies. I love Tauri!")"/>
<ThirdPartyComponent Name="Qdrant" Developer="Andrey Vasnetsov, Tim Visée, Arnaud Gourlay, Luis Cossío, Ivan Pleshkov, Roman Titov, xzfc, JojiiOfficial & Open Source Community" LicenseName="Apache-2.0" LicenseUrl="https://github.com/qdrant/qdrant/blob/master/LICENSE" RepositoryUrl="https://github.com/qdrant/qdrant" UseCase="@T("Qdrant is a vector database and vector similarity search engine. We use it to realize local RAG—retrieval-augmented generation—within AI Studio. Thanks for the effort and great work that has been and is being put into Qdrant.")"/>
<ThirdPartyComponent Name="axum" Developer="David Pedersen, Jonas Platte, tottoto, David Mládek, Yann Simon, Tobias Bieniek, Open Source Community & Tokio Project" LicenseName="MIT" LicenseUrl="https://github.com/tokio-rs/axum/blob/main/LICENSE" RepositoryUrl="https://github.com/tokio-rs/axum" UseCase="@T("Axum is used to provide the small internal service that connects the Rust runtime with the app's user interface. This lets both parts of AI Studio exchange information while the app is running.")"/>
<ThirdPartyComponent Name="axum-server" Developer="Eray Karatay, Adi Salimgereyev, daxpedda & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/programatik29/axum-server/blob/master/LICENSE" RepositoryUrl="https://github.com/programatik29/axum-server" UseCase="@T("Axum server runs the internal axum service over a secure local connection. This helps AI Studio protect the communication between the Rust runtime and the user interface.")"/>
<ThirdPartyComponent Name="Rustls" Developer="Joe Birr-Pixton, Dirkjan Ochtman, Daniel McCarney, Brian Smith, Jacob Hoffman-Andrews, Jorge Aparicio & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/rustls/rustls/blob/main/LICENSE-MIT" RepositoryUrl="https://github.com/rustls/rustls" UseCase="@T("Rustls helps secure the internal connection between the app's user interface and the Rust runtime. This protects the local communication that AI Studio needs while it is running.")"/>
<ThirdPartyComponent Name="Rocket" Developer="Sergio Benitez & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/rwf2/Rocket/blob/master/LICENSE-MIT" RepositoryUrl="https://github.com/rwf2/Rocket" UseCase="@T("We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust.")"/>
<ThirdPartyComponent Name="serde" Developer="Erick Tryzelaar, David Tolnay & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/serde-rs/serde/blob/master/LICENSE-MIT" RepositoryUrl="https://github.com/serde-rs/serde" UseCase="@T("Now we have multiple systems, some developed in .NET and others in Rust. The data format JSON is responsible for translating data between both worlds (called data serialization and deserialization). Serde takes on this task in the Rust world. The counterpart in the .NET world is an integral part of .NET and is located in System.Text.Json.")"/>
<ThirdPartyComponent Name="strum_macros" Developer="Peter Glotfelty & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/Peternator7/strum/blob/master/LICENSE" RepositoryUrl="https://github.com/Peternator7/strum" UseCase="@T("This crate provides derive macros for Rust enums, which we use to reduce boilerplate when implementing string conversions and metadata for runtime types. This is helpful for the communication between our Rust and .NET systems.")"/>
<ThirdPartyComponent Name="keyring-core" Developer="Daniel Brotsky & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/open-source-cooperative/keyring-core/blob/main/LICENSE-MIT" RepositoryUrl="https://github.com/open-source-cooperative/keyring-core" UseCase="@T("AI Studio stores secrets like API keys in your operating systems secure credential store. The keyring-core library handles this by connecting to macOS Keychain, Windows Credential Manager, and Linux Secret Service.")"/>
<ThirdPartyComponent Name="keyring" Developer="Walther Chen, Daniel Brotsky & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/hwchen/keyring-rs/blob/master/LICENSE-MIT" RepositoryUrl="https://github.com/hwchen/keyring-rs" UseCase="@T("In order to use any LLM, each user must store their so-called API key for each LLM provider. This key must be kept secure, similar to a password. The safest way to do this is offered by operating systems like macOS, Windows, and Linux: They have mechanisms to store such data, if available, on special security hardware. Since this is currently not possible in .NET, we use this Rust library.")"/>
<ThirdPartyComponent Name="arboard" Developer="Artur Kovacs, Avi Weinstock, 1Password & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/1Password/arboard/blob/master/LICENSE-MIT.txt" RepositoryUrl="https://github.com/1Password/arboard" UseCase="@T("To be able to use the responses of the LLM in other apps, we often use the clipboard of the respective operating system. Unfortunately, in .NET there is no solution that works with all operating systems. Therefore, I have opted for this library in Rust. This way, data transfer to other apps works on every system.")"/>
<ThirdPartyComponent Name="tokio" Developer="Alex Crichton, Carl Lerche, Alice Ryhl, Taiki Endo, Ivan Petkov, Eliza Weisman, Lucio Franco & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/tokio-rs/tokio/blob/master/LICENSE" RepositoryUrl="https://github.com/tokio-rs/tokio" UseCase="@T("Code in the Rust language can be specified as synchronous or asynchronous. Unlike .NET and the C# language, Rust cannot execute asynchronous code by itself. Rust requires support in the form of an executor for this. Tokio is one such executor.")"/>
<ThirdPartyComponent Name="futures" Developer="Alex Crichton, Taiki Endo, Taylor Cramer, Nemo157, Josef Brandl, Aaron Turon & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/rust-lang/futures-rs/blob/master/LICENSE-MIT" RepositoryUrl="https://github.com/rust-lang/futures-rs" UseCase="@T("This is a library providing the foundations for asynchronous programming in Rust. It includes key trait definitions like Stream, as well as utilities like join!, select!, and various futures combinator methods which enable expressive asynchronous control flow.")"/>
@ -314,4 +312,4 @@
</ExpansionPanel>
</MudExpansionPanels>
</InnerScrolling>
</div>
</div>

View File

@ -173,8 +173,8 @@ CONFIG["SETTINGS"] = {}
-- Configure the enabled preview features:
-- Allowed values can be found in https://github.com/MindWorkAI/AI-Studio/app/MindWork%20AI%20Studio/Settings/DataModel/PreviewFeatures.cs
-- Examples are PRE_WRITER_MODE_2024 and PRE_RAG_2024.
-- CONFIG["SETTINGS"]["DataApp.EnabledPreviewFeatures"] = { "PRE_RAG_2024" }
-- Examples are PRE_WRITER_MODE_2024, PRE_RAG_2024, PRE_SPEECH_TO_TEXT_2026.
-- CONFIG["SETTINGS"]["DataApp.EnabledPreviewFeatures"] = { "PRE_RAG_2024", "PRE_SPEECH_TO_TEXT_2026" }
-- Configure the preselected provider.
-- It must be one of the provider IDs defined in CONFIG["LLM_PROVIDERS"].

View File

@ -6021,12 +6021,18 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1890416390"] = "Nach Updates suc
-- Vision
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1892426825"] = "Vision"
-- In order to use any LLM, each user must store their so-called API key for each LLM provider. This key must be kept secure, similar to a password. The safest way to do this is offered by operating systems like macOS, Windows, and Linux: They have mechanisms to store such data, if available, on special security hardware. Since this is currently not possible in .NET, we use this Rust library.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1915240766"] = "Um ein beliebiges LLM nutzen zu können, muss jeder User seinen sogenannten API-Schlüssel für jeden LLM-Anbieter speichern. Dieser Schlüssel muss sicher aufbewahrt werden ähnlich wie ein Passwort. Die sicherste Methode hierfür bieten Betriebssysteme wie macOS, Windows und Linux: Sie verfügen über Mechanismen, solche Daten sofern vorhanden auf spezieller Sicherheits-Hardware zu speichern. Da dies derzeit in .NET nicht möglich ist, verwenden wir diese Rust-Bibliothek."
-- This library is used to convert HTML to Markdown. This is necessary, e.g., when you provide a URL as input for an assistant.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1924365263"] = "Diese Bibliothek wird verwendet, um HTML in Markdown umzuwandeln. Das ist zum Beispiel notwendig, wenn Sie eine URL als Eingabe für einen Assistenten angeben."
-- Encryption secret: is configured
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1931141322"] = "Geheimnis für die Verschlüsselung: ist konfiguriert"
-- We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1943216839"] = "Wir verwenden Rocket zur Implementierung der Runtime-API. Dies ist notwendig, da die Runtime mit der Benutzeroberfläche (IPC) kommunizieren muss. Rocket ist ein ausgezeichnetes Framework zur Umsetzung von Web-APIs in Rust."
-- Copies the following to the clipboard
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2029659664"] = "Kopiert Folgendes in die Zwischenablage"
@ -6129,12 +6135,6 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3178730036"] = "Haben Sie Ideen
-- Hide Details
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3183837919"] = "Details ausblenden"
-- Axum server runs the internal axum service over a secure local connection. This helps AI Studio protect the communication between the Rust runtime and the user interface.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3208719461"] = "Der Axum-Server führt den internen Axum-Dienst über eine sichere lokale Verbindung aus. Dadurch kann AI Studio die Kommunikation zwischen der Rust-Laufzeitumgebung und der Benutzeroberfläche schützen."
-- Rustls helps secure the internal connection between the app's user interface and the Rust runtime. This protects the local communication that AI Studio needs while it is running.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3239817808"] = "Rustls hilft dabei, die interne Verbindung zwischen der Benutzeroberfläche der App und der Rust-Laufzeitumgebung abzusichern. Dadurch wird die lokale Kommunikation geschützt, die AI Studio während der Ausführung benötigt."
-- Update Pandoc
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3249965383"] = "Pandoc aktualisieren"
@ -6159,9 +6159,6 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3449345633"] = "AI Studio wird m
-- Tauri is used to host the Blazor user interface. It is a great project that allows the creation of desktop applications using web technologies. I love Tauri!
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3494984593"] = "Tauri wird verwendet, um die Blazor-Benutzeroberfläche bereitzustellen. Es ist ein großartiges Projekt, das die Erstellung von Desktop-Anwendungen mit Webtechnologien ermöglicht. Ich liebe Tauri!"
-- AI Studio stores secrets like API keys in your operating systems secure credential store. The keyring-core library handles this by connecting to macOS Keychain, Windows Credential Manager, and Linux Secret Service.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3527399572"] = "AI Studio speichert vertrauliche Daten wie API-Schlüssel im sicheren Speicher Ihres Betriebssystems. Die Bibliothek keyring-core übernimmt dies, indem sie eine Verbindung zum macOS-Schlüsselbund, zur Windows-Anmeldeinformationsverwaltung und zum Linux Secret Service herstellt."
-- Motivation
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3563271893"] = "Motivation"
@ -6231,9 +6228,6 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T836298648"] = "Bereitgestellt vo
-- We use this library to be able to read PowerPoint files. This allows us to insert content from slides into prompts and take PowerPoint files into account in RAG processes. We thank Nils Kruthoff for his work on this Rust crate.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T855925638"] = "Wir verwenden diese Bibliothek, um PowerPoint-Dateien lesen zu können. So ist es möglich, Inhalte aus Folien in Prompts einzufügen und PowerPoint-Dateien in RAG-Prozessen zu berücksichtigen. Wir danken Nils Kruthoff für seine Arbeit an diesem Rust-Crate."
-- Axum is used to provide the small internal service that connects the Rust runtime with the app's user interface. This lets both parts of AI Studio exchange information while the app is running.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T864851737"] = "Axum wird verwendet, um den kleinen internen Dienst bereitzustellen, der die Rust-Laufzeitumgebung mit der Benutzeroberfläche der App verbindet. So können beide Teile von AI Studio Informationen austauschen, während die App läuft."
-- For some data transfers, we need to encode the data in base64. This Rust library is great for this purpose.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T870640199"] = "Für einige Datenübertragungen müssen wir die Daten in Base64 kodieren. Diese Rust-Bibliothek eignet sich dafür hervorragend."
@ -6678,8 +6672,8 @@ UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T2708
-- Unknown preview feature
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T2722827307"] = "Unbekannte Vorschau-Funktion"
-- Transcription: Convert recordings and audio files into text
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T4247148645"] = "Transkription: Aufnahmen und Audiodateien in Text umwandeln"
-- Transcription: Preview of our speech to text system where you can transcribe recordings and audio files into text
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T714355911"] = "Transkription: Vorschau unseres Sprache-zu-Text-Systems, mit dem Sie Aufnahmen und Audiodateien in Text transkribieren können"
-- Use no data sources, when sending an assistant result to a chat
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::SENDTOCHATDATASOURCEBEHAVIOREXTENSIONS::T1223925477"] = "Keine Datenquellen vorauswählen, wenn ein Ergebnis von einem Assistenten an einen neuen Chat gesendet wird"
@ -6975,12 +6969,6 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::ERICLIENT::ERICLIENTV1::T816853779"] = "Fehler
-- Failed to retrieve the authentication methods: the ERI server did not return a valid response.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::ERICLIENT::ERICLIENTV1::T984407320"] = "Fehler beim Abrufen der Authentifizierungsmethoden: Der ERI-Server hat keine gültige Antwort zurückgegeben."
-- AI Studio couldn't install Pandoc because the archive was not found.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T1059477764"] = "AI Studio konnte Pandoc nicht installieren, da das Archiv nicht gefunden wurde."
-- Pandoc doesn't seem to be installed.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T1090474732"] = "Pandoc scheint nicht installiert zu sein."
-- Was not able to validate the Pandoc installation.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T1364844008"] = "Die Pandoc-Installation konnte nicht überprüft werden."
@ -7002,20 +6990,20 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T2550598062"] = "Pandoc v{0} ist insta
-- Pandoc v{0} is installed, but it does not match the required version (v{1}).
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T2555465873"] = "Pandoc v{0} ist installiert, entspricht aber nicht der benötigten Version (v{1})."
-- AI Studio couldn't install Pandoc because the archive type is unknown.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T3492710362"] = "AI Studio konnte Pandoc nicht installieren, da der Archivtyp unbekannt ist."
-- Pandoc was not installed successfully, because the archive was not found.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T34210248"] = "Pandoc wurde nicht erfolgreich installiert, da das Archiv nicht gefunden wurde."
-- Pandoc is not available on the system or the process had issues.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T3746116957"] = "Pandoc ist auf dem System nicht verfügbar oder der Vorgang ist auf Probleme gestoßen."
-- AI Studio couldn't install Pandoc because the executable was not found in the archive.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T403983772"] = "AI Studio konnte Pandoc nicht installieren, da die ausführbare Datei im Archiv nicht gefunden wurde."
-- Pandoc was not installed successfully, because the archive type is unknown.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T3962211670"] = "Pandoc wurde nicht erfolgreich installiert, da der Archivtyp unbekannt ist."
-- AI Studio couldn't find the latest Pandoc version and will install version {0} instead.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T695293525"] = "AI Studio konnte die neueste Pandoc-Version nicht finden und installiert stattdessen Version {0}."
-- It seems that Pandoc is not installed.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T567205144"] = "Es scheint, dass Pandoc nicht installiert ist."
-- AI Studio couldn't install Pandoc.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T932858631"] = "AI Studio konnte Pandoc nicht installieren."
-- The latest Pandoc version was not found, installing version {0} instead.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T726914939"] = "Die neueste Pandoc-Version wurde nicht gefunden, stattdessen wird Version {0} installiert."
-- Pandoc is required for Microsoft Word export.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T1473115556"] = "Pandoc wird für den Export nach Microsoft Word benötigt."

View File

@ -6021,12 +6021,18 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1890416390"] = "Check for update
-- Vision
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1892426825"] = "Vision"
-- In order to use any LLM, each user must store their so-called API key for each LLM provider. This key must be kept secure, similar to a password. The safest way to do this is offered by operating systems like macOS, Windows, and Linux: They have mechanisms to store such data, if available, on special security hardware. Since this is currently not possible in .NET, we use this Rust library.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1915240766"] = "In order to use any LLM, each user must store their so-called API key for each LLM provider. This key must be kept secure, similar to a password. The safest way to do this is offered by operating systems like macOS, Windows, and Linux: They have mechanisms to store such data, if available, on special security hardware. Since this is currently not possible in .NET, we use this Rust library."
-- This library is used to convert HTML to Markdown. This is necessary, e.g., when you provide a URL as input for an assistant.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1924365263"] = "This library is used to convert HTML to Markdown. This is necessary, e.g., when you provide a URL as input for an assistant."
-- Encryption secret: is configured
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1931141322"] = "Encryption secret: is configured"
-- We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1943216839"] = "We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust."
-- Copies the following to the clipboard
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2029659664"] = "Copies the following to the clipboard"
@ -6129,12 +6135,6 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3178730036"] = "Have feature ide
-- Hide Details
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3183837919"] = "Hide Details"
-- Axum server runs the internal axum service over a secure local connection. This helps AI Studio protect the communication between the Rust runtime and the user interface.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3208719461"] = "Axum server runs the internal axum service over a secure local connection. This helps AI Studio protect the communication between the Rust runtime and the user interface."
-- Rustls helps secure the internal connection between the app's user interface and the Rust runtime. This protects the local communication that AI Studio needs while it is running.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3239817808"] = "Rustls helps secure the internal connection between the app's user interface and the Rust runtime. This protects the local communication that AI Studio needs while it is running."
-- Update Pandoc
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3249965383"] = "Update Pandoc"
@ -6159,9 +6159,6 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3449345633"] = "AI Studio runs w
-- Tauri is used to host the Blazor user interface. It is a great project that allows the creation of desktop applications using web technologies. I love Tauri!
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3494984593"] = "Tauri is used to host the Blazor user interface. It is a great project that allows the creation of desktop applications using web technologies. I love Tauri!"
-- AI Studio stores secrets like API keys in your operating systems secure credential store. The keyring-core library handles this by connecting to macOS Keychain, Windows Credential Manager, and Linux Secret Service.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3527399572"] = "AI Studio stores secrets like API keys in your operating systems secure credential store. The keyring-core library handles this by connecting to macOS Keychain, Windows Credential Manager, and Linux Secret Service."
-- Motivation
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T3563271893"] = "Motivation"
@ -6231,9 +6228,6 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T836298648"] = "Provided by confi
-- We use this library to be able to read PowerPoint files. This allows us to insert content from slides into prompts and take PowerPoint files into account in RAG processes. We thank Nils Kruthoff for his work on this Rust crate.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T855925638"] = "We use this library to be able to read PowerPoint files. This allows us to insert content from slides into prompts and take PowerPoint files into account in RAG processes. We thank Nils Kruthoff for his work on this Rust crate."
-- Axum is used to provide the small internal service that connects the Rust runtime with the app's user interface. This lets both parts of AI Studio exchange information while the app is running.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T864851737"] = "Axum is used to provide the small internal service that connects the Rust runtime with the app's user interface. This lets both parts of AI Studio exchange information while the app is running."
-- For some data transfers, we need to encode the data in base64. This Rust library is great for this purpose.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T870640199"] = "For some data transfers, we need to encode the data in base64. This Rust library is great for this purpose."
@ -6678,8 +6672,8 @@ UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T2708
-- Unknown preview feature
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T2722827307"] = "Unknown preview feature"
-- Transcription: Convert recordings and audio files into text
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T4247148645"] = "Transcription: Convert recordings and audio files into text"
-- Transcription: Preview of our speech to text system where you can transcribe recordings and audio files into text
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::PREVIEWFEATURESEXTENSIONS::T714355911"] = "Transcription: Preview of our speech to text system where you can transcribe recordings and audio files into text"
-- Use no data sources, when sending an assistant result to a chat
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::DATAMODEL::SENDTOCHATDATASOURCEBEHAVIOREXTENSIONS::T1223925477"] = "Use no data sources, when sending an assistant result to a chat"
@ -6975,12 +6969,6 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::ERICLIENT::ERICLIENTV1::T816853779"] = "Failed
-- Failed to retrieve the authentication methods: the ERI server did not return a valid response.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::ERICLIENT::ERICLIENTV1::T984407320"] = "Failed to retrieve the authentication methods: the ERI server did not return a valid response."
-- AI Studio couldn't install Pandoc because the archive was not found.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T1059477764"] = "AI Studio couldn't install Pandoc because the archive was not found."
-- Pandoc doesn't seem to be installed.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T1090474732"] = "Pandoc doesn't seem to be installed."
-- Was not able to validate the Pandoc installation.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T1364844008"] = "Was not able to validate the Pandoc installation."
@ -7002,20 +6990,20 @@ UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T2550598062"] = "Pandoc v{0} is instal
-- Pandoc v{0} is installed, but it does not match the required version (v{1}).
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T2555465873"] = "Pandoc v{0} is installed, but it does not match the required version (v{1})."
-- AI Studio couldn't install Pandoc because the archive type is unknown.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T3492710362"] = "AI Studio couldn't install Pandoc because the archive type is unknown."
-- Pandoc was not installed successfully, because the archive was not found.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T34210248"] = "Pandoc was not installed successfully, because the archive was not found."
-- Pandoc is not available on the system or the process had issues.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T3746116957"] = "Pandoc is not available on the system or the process had issues."
-- AI Studio couldn't install Pandoc because the executable was not found in the archive.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T403983772"] = "AI Studio couldn't install Pandoc because the executable was not found in the archive."
-- Pandoc was not installed successfully, because the archive type is unknown.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T3962211670"] = "Pandoc was not installed successfully, because the archive type is unknown."
-- AI Studio couldn't find the latest Pandoc version and will install version {0} instead.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T695293525"] = "AI Studio couldn't find the latest Pandoc version and will install version {0} instead."
-- It seems that Pandoc is not installed.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T567205144"] = "It seems that Pandoc is not installed."
-- AI Studio couldn't install Pandoc.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T932858631"] = "AI Studio couldn't install Pandoc."
-- The latest Pandoc version was not found, installing version {0} instead.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOC::T726914939"] = "The latest Pandoc version was not found, installing version {0} instead."
-- Pandoc is required for Microsoft Word export.
UI_TEXT_CONTENT["AISTUDIO::TOOLS::PANDOCEXPORT::T1473115556"] = "Pandoc is required for Microsoft Word export."

View File

@ -14,7 +14,7 @@ public static class PreviewFeaturesExtensions
PreviewFeatures.PRE_PLUGINS_2025 => TB("Plugins: Preview of our plugin system where you can extend the functionality of the app"),
PreviewFeatures.PRE_READ_PDF_2025 => TB("Read PDF: Preview of our PDF reading system where you can read and extract text from PDF files"),
PreviewFeatures.PRE_DOCUMENT_ANALYSIS_2025 => TB("Document Analysis: Preview of our document analysis system where you can analyze and extract information from documents"),
PreviewFeatures.PRE_SPEECH_TO_TEXT_2026 => TB("Transcription: Convert recordings and audio files into text"),
PreviewFeatures.PRE_SPEECH_TO_TEXT_2026 => TB("Transcription: Preview of our speech to text system where you can transcribe recordings and audio files into text"),
_ => TB("Unknown preview feature")
};
@ -33,7 +33,6 @@ public static class PreviewFeaturesExtensions
PreviewFeatures.PRE_READ_PDF_2025 => true,
PreviewFeatures.PRE_PLUGINS_2025 => true,
PreviewFeatures.PRE_DOCUMENT_ANALYSIS_2025 => true,
PreviewFeatures.PRE_SPEECH_TO_TEXT_2026 => true,
_ => false
};

View File

@ -12,6 +12,7 @@ public static class PreviewVisibilityExtensions
if (visibility >= PreviewVisibility.BETA)
{
features.Add(PreviewFeatures.PRE_DOCUMENT_ANALYSIS_2025);
features.Add(PreviewFeatures.PRE_SPEECH_TO_TEXT_2026);
}
if (visibility >= PreviewVisibility.ALPHA)

View File

@ -35,13 +35,12 @@ public static partial class Pandoc
private static bool HAS_LOGGED_AVAILABILITY_CHECK_ONCE;
private static readonly HttpClient WEB_CLIENT = new();
private static readonly SemaphoreSlim INSTALLATION_LOCK = new(1, 1);
/// <summary>
/// Prepares a Pandoc process by using the Pandoc process builder.
/// </summary>
/// <returns>The Pandoc process builder with default settings.</returns>
private static PandocProcessBuilder PreparePandocProcess() => PandocProcessBuilder.Create();
public static PandocProcessBuilder PreparePandocProcess() => PandocProcessBuilder.Create();
/// <summary>
/// Checks if pandoc is available on the system and can be started as a process or is present in AI Studio's data dir.
@ -146,12 +145,12 @@ public static partial class Pandoc
catch (Exception e)
{
if (showMessages)
await MessageBus.INSTANCE.SendError(new(@Icons.Material.Filled.AppsOutage, TB("Pandoc doesn't seem to be installed.")));
await MessageBus.INSTANCE.SendError(new(@Icons.Material.Filled.AppsOutage, TB("It seems that Pandoc is not installed.")));
if(shouldLog)
LOG.LogError(e, "Pandoc availability check failed. This usually means Pandoc is not installed or not in the system PATH.");
return new(false, TB("Pandoc doesn't seem to be installed."), false, string.Empty, false);
return new(false, TB("It seems that Pandoc is not installed."), false, string.Empty, false);
}
finally
{
@ -166,230 +165,76 @@ public static partial class Pandoc
/// <returns>None</returns>
public static async Task InstallAsync(RustService rustService)
{
await INSTALLATION_LOCK.WaitAsync();
var latestVersion = await FetchLatestVersionAsync();
var installDir = await GetPandocDataFolder(rustService);
var installParentDir = Path.GetDirectoryName(installDir) ?? Path.GetTempPath();
var stagingDir = Path.Combine(installParentDir, $"pandoc-install-{Guid.NewGuid():N}");
var pandocTempDownloadFile = Path.GetTempFileName();
ClearFolder(installDir);
LOG.LogInformation("Trying to install Pandoc v{0} to '{1}'...", latestVersion, installDir);
try
{
if (!Directory.Exists(installParentDir))
Directory.CreateDirectory(installParentDir);
if (!Directory.Exists(installDir))
Directory.CreateDirectory(installDir);
// Create a temporary file to download the archive to:
var pandocTempDownloadFile = Path.GetTempFileName();
//
// Download the latest Pandoc archive from GitHub:
//
var uri = GenerateArchiveUri(latestVersion);
if (string.IsNullOrWhiteSpace(uri))
{
await MessageBus.INSTANCE.SendError(new (Icons.Material.Filled.Error, TB("AI Studio couldn't install Pandoc because the archive type is unknown.")));
LOG.LogError("Pandoc was not installed, no archive is available for architecture '{Architecture}'.", CPU_ARCHITECTURE.ToUserFriendlyName());
return;
}
using var response = await WEB_CLIENT.GetAsync(uri);
var uri = await GenerateArchiveUriAsync();
var response = await WEB_CLIENT.GetAsync(uri);
if (!response.IsSuccessStatusCode)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Error, TB("AI Studio couldn't install Pandoc because the archive was not found.")));
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Error, TB("Pandoc was not installed successfully, because the archive was not found.")));
LOG.LogError("Pandoc was not installed successfully, because the archive was not found (status code {0}): url='{1}', message='{2}'", response.StatusCode, uri, response.RequestMessage);
return;
}
// Download the archive to the temporary file:
await using (var tempFileStream = File.Create(pandocTempDownloadFile))
{
await response.Content.CopyToAsync(tempFileStream);
await tempFileStream.FlushAsync();
}
await using var tempFileStream = File.Create(pandocTempDownloadFile);
await response.Content.CopyToAsync(tempFileStream);
Directory.CreateDirectory(stagingDir);
if (uri.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
{
await RunWithRetriesAsync(
() =>
{
ZipFile.ExtractToDirectory(pandocTempDownloadFile, stagingDir, true);
return Task.CompletedTask;
},
"extracting the Pandoc ZIP archive");
ZipFile.ExtractToDirectory(pandocTempDownloadFile, installDir);
}
else if (uri.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
{
await RunWithRetriesAsync(
async () =>
{
await using var tgzStream = File.Open(pandocTempDownloadFile, FileMode.Open, FileAccess.Read, FileShare.Read);
await using var uncompressedStream = new GZipStream(tgzStream, CompressionMode.Decompress);
await TarFile.ExtractToDirectoryAsync(uncompressedStream, stagingDir, true);
},
"extracting the Pandoc TAR archive");
await using var tgzStream = File.Open(pandocTempDownloadFile, FileMode.Open, FileAccess.Read, FileShare.Read);
await using var uncompressedStream = new GZipStream(tgzStream, CompressionMode.Decompress);
await TarFile.ExtractToDirectoryAsync(uncompressedStream, installDir, true);
}
else
{
await MessageBus.INSTANCE.SendError(new (Icons.Material.Filled.Error, TB("AI Studio couldn't install Pandoc because the archive type is unknown.")));
await MessageBus.INSTANCE.SendError(new (Icons.Material.Filled.Error, TB("Pandoc was not installed successfully, because the archive type is unknown.")));
LOG.LogError("Pandoc was not installed, the archive is unknown: url='{0}'", uri);
return;
}
var stagedPandocExecutable = FindExecutableInDirectory(stagingDir, PandocProcessBuilder.PandocExecutableName);
if (string.IsNullOrWhiteSpace(stagedPandocExecutable))
{
await MessageBus.INSTANCE.SendError(new (Icons.Material.Filled.Error, TB("AI Studio couldn't install Pandoc because the executable was not found in the archive.")));
LOG.LogError("Pandoc was not installed, the executable was not found in the extracted archive: '{StagingDir}'.", stagingDir);
return;
}
LOG.LogInformation("Found Pandoc executable in downloaded archive: '{Executable}'.", stagedPandocExecutable);
await ReplaceInstallationDirectoryAsync(stagingDir, installDir);
File.Delete(pandocTempDownloadFile);
await MessageBus.INSTANCE.SendSuccess(new(Icons.Material.Filled.CheckCircle, string.Format(TB("Pandoc v{0} was installed successfully."), latestVersion)));
LOG.LogInformation("Pandoc v{0} was installed successfully.", latestVersion);
}
catch (Exception ex)
{
await MessageBus.INSTANCE.SendError(new(Icons.Material.Filled.Error, TB("AI Studio couldn't install Pandoc.")));
LOG.LogError(ex, "An error occurred while installing Pandoc.");
}
finally
{
TryDeleteFile(pandocTempDownloadFile);
if (Directory.Exists(stagingDir))
await TryDeleteFolderAsync(stagingDir);
INSTALLATION_LOCK.Release();
}
}
private static async Task ReplaceInstallationDirectoryAsync(string stagingDir, string installDir)
private static void ClearFolder(string path)
{
var backupDir = $"{installDir}.backup-{Guid.NewGuid():N}";
var hasBackup = false;
var stagingWasMoved = false;
try
{
if (Directory.Exists(installDir))
{
await MoveDirectoryWithRetriesAsync(installDir, backupDir, "moving the previous Pandoc installation to backup");
hasBackup = true;
}
await MoveDirectoryWithRetriesAsync(stagingDir, installDir, "moving the new Pandoc installation into place");
stagingWasMoved = true;
}
catch (Exception ex)
{
if (hasBackup && !stagingWasMoved && !Directory.Exists(installDir) && Directory.Exists(backupDir))
{
try
{
await MoveDirectoryWithRetriesAsync(backupDir, installDir, "restoring the previous Pandoc installation");
hasBackup = false;
}
catch (Exception rollbackEx)
{
LOG.LogError(rollbackEx, "Error restoring previous Pandoc installation directory. Keeping backup directory at: '{BackupDir}'.", backupDir);
}
}
LOG.LogError(ex, "Error replacing pandoc installation directory.");
throw;
}
finally
{
if (hasBackup && stagingWasMoved && Directory.Exists(backupDir))
await TryDeleteFolderAsync(backupDir);
}
}
private static string FindExecutableInDirectory(string rootDirectory, string executableName)
{
if (!Directory.Exists(rootDirectory))
return string.Empty;
var rootExecutablePath = Path.Combine(rootDirectory, executableName);
if (File.Exists(rootExecutablePath))
return rootExecutablePath;
foreach (var subdirectory in Directory.GetDirectories(rootDirectory, "*", SearchOption.AllDirectories))
{
var pandocPath = Path.Combine(subdirectory, executableName);
if (File.Exists(pandocPath))
return pandocPath;
}
return string.Empty;
}
private static async Task MoveDirectoryWithRetriesAsync(string sourceDir, string destinationDir, string operationName)
{
await RunWithRetriesAsync(
() =>
{
Directory.Move(sourceDir, destinationDir);
return Task.CompletedTask;
},
operationName,
maxAttempts: 8);
}
private static async Task RunWithRetriesAsync(Func<Task> operation, string operationName, int maxAttempts = 4)
{
for (var attempt = 1; attempt <= maxAttempts; attempt++)
{
try
{
await operation();
return;
}
catch (Exception ex) when (attempt < maxAttempts && ex is IOException or UnauthorizedAccessException)
{
LOG.LogWarning(ex, "Error while {OperationName}; retrying attempt {Attempt}/{MaxAttempts}.", operationName, attempt + 1, maxAttempts);
await Task.Delay(TimeSpan.FromMilliseconds(250 * attempt));
}
}
}
private static void TryDeleteFile(string path)
{
if (string.IsNullOrWhiteSpace(path) || !File.Exists(path))
if (!Directory.Exists(path))
return;
try
{
File.Delete(path);
Directory.Delete(path, true);
}
catch (Exception ex)
{
LOG.LogWarning(ex, "Was not able to delete temporary Pandoc archive: '{Path}'.", path);
}
}
private static async Task TryDeleteFolderAsync(string path)
{
if (string.IsNullOrWhiteSpace(path) || !Directory.Exists(path))
return;
try
{
await RunWithRetriesAsync(
() =>
{
Directory.Delete(path, true);
return Task.CompletedTask;
},
$"deleting temporary Pandoc directory '{path}'",
maxAttempts: 3);
}
catch (Exception ex)
{
LOG.LogWarning(ex, "Was not able to delete temporary Pandoc directory: '{Path}'.", path);
LOG.LogError(ex, "Error clearing pandoc installation directory.");
}
}
@ -403,7 +248,7 @@ public static partial class Pandoc
if (!response.IsSuccessStatusCode)
{
LOG.LogError("Code {StatusCode}: Could not fetch Pandoc's latest page: {Response}", response.StatusCode, response.RequestMessage);
await MessageBus.INSTANCE.SendWarning(new (Icons.Material.Filled.Warning, string.Format(TB("AI Studio couldn't find the latest Pandoc version and will install version {0} instead."), FALLBACK_VERSION.ToString())));
await MessageBus.INSTANCE.SendWarning(new (Icons.Material.Filled.Warning, string.Format(TB("The latest Pandoc version was not found, installing version {0} instead."), FALLBACK_VERSION.ToString())));
return FALLBACK_VERSION.ToString();
}
@ -412,7 +257,7 @@ public static partial class Pandoc
if (!versionMatch.Success)
{
LOG.LogError("The latest version regex returned nothing: {0}", versionMatch.Groups.ToString());
await MessageBus.INSTANCE.SendWarning(new (Icons.Material.Filled.Warning, string.Format(TB("AI Studio couldn't find the latest Pandoc version and will install version {0} instead."), FALLBACK_VERSION.ToString())));
await MessageBus.INSTANCE.SendWarning(new (Icons.Material.Filled.Warning, string.Format(TB("The latest Pandoc version was not found, installing version {0} instead."), FALLBACK_VERSION.ToString())));
return FALLBACK_VERSION.ToString();
}
@ -427,11 +272,6 @@ public static partial class Pandoc
public static async Task<string> GenerateArchiveUriAsync()
{
var version = await FetchLatestVersionAsync();
return GenerateArchiveUri(version);
}
private static string GenerateArchiveUri(string version)
{
var baseUri = $"{DOWNLOAD_URL}/{version}/pandoc-{version}-";
return CPU_ARCHITECTURE switch
{

View File

@ -220,17 +220,6 @@ public sealed class PandocProcessBuilder
}
}
foreach (var candidate in SystemPandocExecutableCandidates(PandocExecutableName))
{
if (!File.Exists(candidate))
continue;
if (shouldLog)
LOGGER.LogInformation("Found system Pandoc installation at: '{Path}'.", candidate);
return new(candidate, false);
}
//
// When no local installation was found, we assume that the pandoc executable is in the system PATH:
//
@ -249,59 +238,4 @@ public sealed class PandocProcessBuilder
/// Reads the os platform to determine the used executable name.
/// </summary>
public static string PandocExecutableName => CPU_ARCHITECTURE is RID.WIN_ARM64 or RID.WIN_X64 ? "pandoc.exe" : "pandoc";
private static IEnumerable<string> SystemPandocExecutableCandidates(string executableName)
{
var candidates = new List<string>();
switch (CPU_ARCHITECTURE)
{
case RID.WIN_X64 or RID.WIN_ARM64:
AddCandidate(candidates, Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "Pandoc", executableName);
AddCandidate(candidates, Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles), "Pandoc", executableName);
AddCandidate(candidates, Environment.GetFolderPath(Environment.SpecialFolder.ProgramFilesX86), "Pandoc", executableName);
break;
case RID.OSX_X64 or RID.OSX_ARM64:
AddCandidate(candidates, "/opt/homebrew/bin", executableName);
AddCandidate(candidates, "/usr/local/bin", executableName);
AddCandidate(candidates, "/usr/bin", executableName);
break;
case RID.LINUX_X64 or RID.LINUX_ARM64:
AddCandidate(candidates, "/usr/local/bin", executableName);
AddCandidate(candidates, "/usr/bin", executableName);
AddCandidate(candidates, "/snap/bin", executableName);
var homeDirectory = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
AddCandidate(candidates, homeDirectory, ".local", "bin", executableName);
break;
}
foreach (var pathDirectory in GetPathDirectories())
AddCandidate(candidates, pathDirectory, executableName);
var comparer = CPU_ARCHITECTURE is RID.WIN_X64 or RID.WIN_ARM64
? StringComparer.OrdinalIgnoreCase
: StringComparer.Ordinal;
return candidates.Distinct(comparer);
}
private static IEnumerable<string> GetPathDirectories()
{
var pathValue = Environment.GetEnvironmentVariable("PATH");
if (string.IsNullOrWhiteSpace(pathValue))
yield break;
foreach (var pathDirectory in pathValue.Split(Path.PathSeparator, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
yield return pathDirectory;
}
private static void AddCandidate(List<string> candidates, params string[] pathParts)
{
if (pathParts.Any(string.IsNullOrWhiteSpace))
return;
candidates.Add(Path.Combine(pathParts));
}
}

View File

@ -185,7 +185,9 @@ public sealed class GlobalShortcutService : BackgroundService, IMessageBusReceiv
return new(shortcut, isEnabled, false);
var fallbackShortcut = settingsSnapshot.App.ShortcutVoiceRecording;
var fallbackEnabled = !string.IsNullOrWhiteSpace(settingsSnapshot.App.UseTranscriptionProvider);
var fallbackEnabled =
settingsSnapshot.App.EnabledPreviewFeatures.Contains(PreviewFeatures.PRE_SPEECH_TO_TEXT_2026) &&
!string.IsNullOrWhiteSpace(settingsSnapshot.App.UseTranscriptionProvider);
if (!fallbackEnabled || string.IsNullOrWhiteSpace(fallbackShortcut))
return new(shortcut, isEnabled, false);

View File

@ -7,8 +7,7 @@ public sealed partial class RustService
public async Task<DirectorySelectionResponse> SelectDirectory(string title, string? initialDirectory = null)
{
PreviousDirectory? previousDirectory = initialDirectory is null ? null : new (initialDirectory);
var encodedTitle = Uri.EscapeDataString(title);
var result = await this.http.PostAsJsonAsync($"/select/directory?title={encodedTitle}", previousDirectory, this.jsonRustSerializerOptions);
var result = await this.http.PostAsJsonAsync($"/select/directory?title={title}", previousDirectory, this.jsonRustSerializerOptions);
if (!result.IsSuccessStatusCode)
{
this.logger!.LogError($"Failed to select a directory: '{result.StatusCode}'");

View File

@ -13,16 +13,7 @@ public sealed partial class RustService
var response = await this.http.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
if (!response.IsSuccessStatusCode)
{
var responseBody = await response.Content.ReadAsStringAsync();
this.logger?.LogError(
"Failed to read arbitrary file data from Rust runtime. Status: {StatusCode}, reason: '{ReasonPhrase}', path: '{Path}', body: '{Body}'",
response.StatusCode,
response.ReasonPhrase,
path,
responseBody);
return string.Empty;
}
var resultBuilder = new StringBuilder();

View File

@ -22,28 +22,24 @@
},
"LuaCSharp": {
"type": "Direct",
"requested": "[0.5.5, )",
"resolved": "0.5.5",
"contentHash": "IL44DCbMtEafyiy8DzHFd/f+1pXuDUVFJMCJPAu8vQHNfO3ADSoWSOKMg9Py1za/ZE1K0gs0jll1viInoN+19Q==",
"dependencies": {
"LuaCSharp.Annotations": "0.5.5",
"LuaCSharp.SourceGenerator": "0.5.5"
}
"requested": "[0.5.3, )",
"resolved": "0.5.3",
"contentHash": "qpgmCaNx08+eiWOmz7U/mXOH8DXUyLW8fsCukKjN8hVled2y4HrapsZlmrnIf9iaNfEQusUR/8d1M2XX6NIzbQ=="
},
"Microsoft.Extensions.FileProviders.Embedded": {
"type": "Direct",
"requested": "[9.0.16, )",
"resolved": "9.0.16",
"contentHash": "QRlSWz7zEplBxETrySKK3qpPm/7NPaRGnUpEXQNP3k6Ht2KdVy59JcoUPXlNGnNE3tJd3ycXfMeWqxBG6SyV0w==",
"requested": "[9.0.15, )",
"resolved": "9.0.15",
"contentHash": "XFlI3ZISL344QdPLtaXG0yPyjkHQR82DYXrJa9aF00Qeu7dDnFxwFgP/ItkkyiLjAe/NSj6vksxOdnelXGT1vQ==",
"dependencies": {
"Microsoft.Extensions.FileProviders.Abstractions": "9.0.16"
"Microsoft.Extensions.FileProviders.Abstractions": "9.0.15"
}
},
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[9.0.16, )",
"resolved": "9.0.16",
"contentHash": "ccPBYGLPJt8DeJTUzQ0JzOh/iuUAgnjayU63PokVywAhUOx+dzDKSPTL7AG94U/VpvNXflTT2AjsFAIF1+bXBw=="
"requested": "[9.0.15, )",
"resolved": "9.0.15",
"contentHash": "EejcbfCMR77Dthy77qxRbEShmzLApHZUPqXMBVQK+A0pNrRThkaHoGGMGvbq/gTkC/waKcDEgjBkbaejB58Wtw=="
},
"MudBlazor": {
"type": "Direct",
@ -68,9 +64,9 @@
},
"Qdrant.Client": {
"type": "Direct",
"requested": "[1.18.1, )",
"resolved": "1.18.1",
"contentHash": "eBwFLihGMvN02/jr/BNdcop2XmtA10y8VMOclVZ7K2H8yheAhl7jbkf7I8e4X3RYpT+cAxgcalP4xmOhgs4KJg==",
"requested": "[1.17.0, )",
"resolved": "1.17.0",
"contentHash": "QFNtVu4Kiz6NHAAi2UQk+Ia64/qyX1NMecQGIBGnKqFOlpnxI3OCCBRBKXWGPk/c+4vAmR3Dj+cQ9apqX0zU8A==",
"dependencies": {
"Google.Protobuf": "3.31.0",
"Grpc.Net.Client": "2.71.0"
@ -117,16 +113,6 @@
"Grpc.Core.Api": "2.71.0"
}
},
"LuaCSharp.Annotations": {
"type": "Transitive",
"resolved": "0.5.5",
"contentHash": "5VcwcTNGCY5YXLz2BRko5/Z0YGd6MZqNsnnfPOsGHHpAtqWPFbD0vtOZR4jUqaQLtQUvl2+WRfmIOhp6L2S0rw=="
},
"LuaCSharp.SourceGenerator": {
"type": "Transitive",
"resolved": "0.5.5",
"contentHash": "2xHKGc1bYXTsmSzZCNmKkuAU6A+1azulNiPY/ICKBSHIgEPMNRQ7JS6PvAClrHe6bk8SKcC/fbba6igtDzDaAw=="
},
"Markdig": {
"type": "Transitive",
"resolved": "0.41.3",
@ -196,10 +182,10 @@
},
"Microsoft.Extensions.FileProviders.Abstractions": {
"type": "Transitive",
"resolved": "9.0.16",
"contentHash": "/YLSWDs+p0Y4+UGPoWI3uUNq7R5/f/8zw8XeViuhfSTGnPowoqbllBE9aR4TteFgNfIH4IHkhUwSlhMLB0aL8g==",
"resolved": "9.0.15",
"contentHash": "yzWilnNU/MvHINapPhY6iFAeApZnhToXbEBplORucn01hFc1F6ZaKt0V9dHYpUMun8WR9cSnq1ky35FWREVZbA==",
"dependencies": {
"Microsoft.Extensions.Primitives": "9.0.16"
"Microsoft.Extensions.Primitives": "9.0.15"
}
},
"Microsoft.Extensions.Localization": {
@ -237,8 +223,8 @@
},
"Microsoft.Extensions.Primitives": {
"type": "Transitive",
"resolved": "9.0.16",
"contentHash": "w5RE1MR0lnAElsRJaFd2POIXl/H62aBKmfX8ibYmRmbk0JB9V/9jR0VD5NxiP1ETWpnDAnPguTSe7fF/FdsHEQ=="
"resolved": "9.0.15",
"contentHash": "WRPJ9kpIwsOcghRT0tduIqiz7CDv7WsnL4kTJavtHS4j5AW++4LlR63oOSTL2o/zLR4T1z0/FQMgrnsPJ5bpQQ=="
},
"Microsoft.JSInterop": {
"type": "Transitive",

View File

@ -1,2 +1 @@
# v26.5.2, build 237 (2026-05-06 16:38 UTC)
- Updated the underlying Tauri framework from version 1 to the latest version 2. Please do not install this prerelease manually. Production versions such as v26.4.1 will ignore this update. We are using this prerelease to test the clean update path for the migration from the Tauri v1 framework to the Tauri v2 framework. After a successful test, this prerelease will be removed.
# v26.5.2, build 237 (2026-05-xx xx:xx UTC)

View File

@ -1,2 +0,0 @@
# v26.5.3, build 238 (2026-05-13 09:50 UTC)
- Migrated away from Rocket to Axum for our internal IPC API. Please do not install this prerelease manually. Production versions, such as v26.4.1, will ignore this update. We are using this prerelease to test the clean update path. After a successful test, this prerelease will be removed.

View File

@ -1,2 +0,0 @@
# v26.5.4, build 239 (2026-05-13 11:58 UTC)
- Migrated away from Rocket to Axum for our internal IPC API. Please do not install this prerelease manually. Production versions, such as v26.4.1, will ignore this update. We are using this prerelease to test the clean update path. After a successful test, this prerelease will be removed.

View File

@ -1,12 +0,0 @@
# v26.5.5, build 240 (2026-05-xx xx:xx UTC)
- Released the voice recording and transcription for all users. You no longer need to enable a preview feature to configure transcription providers, select a transcription provider, or use dictation.
- Improved the app's security foundation with major modernization of the native runtime and its internal communication layer. This work is mostly invisible during everyday use, but it replaces older components that no longer received the security updates we require. We also continued updating security-sensitive dependencies so AI Studio stays on a healthier, better maintained base.
- Improved the Pandoc management and detection process to make it more reliable.
- Fixed the Pandoc installation, which could fail and prevent AI Studio from installing its local Pandoc dependency.
- Upgraded the native secret storage integration to `keyring-core`, keeping API keys in the secure credential store provided by the operating system.
- Upgraded Rust to v1.95.0.
- Upgraded .NET to v9.0.16.
- Upgraded Tauri to v2.11.1.
- Upgraded PDFium to v148.0.7763.0.
- Upgraded Qdrant to v1.18.0.
- Upgraded other dependencies as well.

View File

@ -9,7 +9,7 @@ Therefore, we cannot provide a static list here that is valid for all Linux syst
## Prerequisites
1. Install the [.NET 9 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/9.0).
2. [Install the Rust compiler](https://www.rust-lang.org/tools/install) in the latest stable version.
3. Meet the prerequisites for building [Tauri](https://v2.tauri.app/start/prerequisites/). Node.js is **not** required, though.
3. Met the prerequisites for building [Tauri](https://tauri.app/v1/guides/getting-started/prerequisites/). Node.js is **not** required, though.
4. The core team uses [JetBrains](https://www.jetbrains.com/) [Rider](https://www.jetbrains.com/rider/) and [RustRover](https://www.jetbrains.com/rust/) for development. Both IDEs are free to use for open-source projects for non-commercial use. They are available for macOS, Linux, and Windows systems. Profiles are provided for these IDEs, so you can get started right away. However, you can also use a different IDE.
4. Clone the repository.
@ -17,7 +17,7 @@ Therefore, we cannot provide a static list here that is valid for all Linux syst
Regardless of whether you want to build the app locally for yourself (not trusting the pre-built binaries) or test your changes before creating a PR, you have to run the following commands at least once:
1. Open a terminal.
2. Install the Tauri CLI by running `cargo install tauri-cli --version 2.11.0 --locked`.
2. Install the Tauri CLI by running `cargo install --version 1.6.2 tauri-cli`.
3. Navigate to the `/app/Build` directory within the repository.
4. Run `dotnet run build` to build the entire app.

View File

@ -84,4 +84,4 @@ We have to figure out if you have an Intel/AMD or a modern ARM system on your Li
2. Open a terminal and navigate to the Downloads folder: `cd Downloads`.
3. Make the AppImage executable: `chmod +x mind-work-ai-studio_amd64.AppImage`.
4. You might want to move the AppImage to a more convenient location, e.g., your home directory: `mv mind-work-ai-studio_amd64.AppImage ~/`.
5. Now you can run the AppImage from your file manager (double-click) or the terminal: `./mind-work-ai-studio_amd64.AppImage`.
4. Now you can run the AppImage from your file manager (double-click) or the terminal: `./mind-work-ai-studio_amd64.AppImage`.

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 32 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 31 KiB

View File

@ -1,12 +1,12 @@
26.5.4
2026-05-13 11:58:02 UTC
239
9.0.117 (commit 6e241a69c1)
9.0.16 (commit a1e6809fb8)
1.95.0 (commit 59807616e)
26.5.1
2026-05-06 13:06:02 UTC
236
9.0.116 (commit fb4af7e1b3)
9.0.15 (commit 4250c8399a)
1.93.1 (commit 01f6ddf75)
8.15.0
2.11.1
0089849e0c3, release
1.8.3
ece329140e4, release
osx-arm64
148.0.7763.0
1.18.0
144.0.7543.0
1.17.1

4355
runtime/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,66 +1,61 @@
[package]
name = "mindwork-ai-studio"
version = "26.5.4"
edition = "2024"
version = "26.5.1"
edition = "2021"
description = "MindWork AI Studio"
authors = ["Thorsten Sommer"]
[build-dependencies]
tauri-build = { version = "2.6.1", features = [] }
tauri-build = { version = "1.5.6", features = [] }
[dependencies]
tauri = { version = "2.11.1", features = [] }
tauri-plugin-window-state = { version = "2.4.1" }
tauri-plugin-shell = "2.3.5"
tauri-plugin-dialog = "2.7.1"
tauri-plugin-opener = "2.5.4"
tauri = { version = "1.8.3", features = [ "http-all", "updater", "shell-sidecar", "shell-open", "dialog", "global-shortcut"] }
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.149"
keyring-core = "1.0.0"
keyring = { version = "3.6.2", features = ["apple-native", "windows-native", "sync-secret-service"] }
arboard = "3.6.1"
tokio = { version = "1.52.3", features = ["rt", "rt-multi-thread", "macros", "process"] }
tokio = { version = "1.50.0", features = ["rt", "rt-multi-thread", "macros", "process"] }
tokio-stream = "0.1.18"
futures = "0.3.32"
async-stream = "0.3.6"
flexi_logger = "0.31.8"
log = { version = "0.4.29", features = ["kv"] }
once_cell = "1.21.4"
axum = { version = "0.8.9", features = ["http2", "json", "query", "tokio"] }
axum-server = { version = "0.8.0", features = ["tls-rustls"] }
rustls = { version = "0.23.28", default-features = false, features = ["aws_lc_rs"] }
rocket = { version = "0.5.1", features = ["json", "tls"] }
rand = "0.10.1"
rand_chacha = "0.10.0"
base64 = "0.22.1"
aes = "0.9.0"
cbc = "0.2.0"
pbkdf2 = "0.13.0"
hmac = "0.13.0"
sha2 = "0.11.0"
rcgen = { version = "0.14.8", features = ["pem"] }
file-format = "0.29.0"
calamine = "0.35.0"
pdfium-render = "0.9.1"
aes = "0.8.4"
cbc = "0.1.2"
pbkdf2 = "0.12.2"
hmac = "0.12.1"
sha2 = "0.10.8"
rcgen = { version = "0.14.7", features = ["pem"] }
file-format = "0.28.0"
calamine = "0.34.0"
pdfium-render = "0.8.37"
sys-locale = "0.3.2"
cfg-if = "1.0.4"
pptx-to-md = "0.4.0"
tempfile = "3.27.0"
strum_macros = "0.28.0"
sysinfo = "0.39.1"
bytes = "1.11.1"
sysinfo = "0.38.4"
# Fixes security vulnerability downstream, where the upstream is not fixed yet:
time = "0.3.47" # -> Rocket
bytes = "1.11.1" # -> almost every dependency
tar = "0.4.45" # -> Tauri v1
[target.'cfg(target_os = "linux")'.dependencies]
# See issue https://github.com/tauri-apps/tauri/issues/4470
reqwest = { version = "0.13.2", features = ["native-tls-vendored"] }
# Fixes security vulnerability downstream, where the upstream is not fixed yet:
openssl = "0.10.76" # -> reqwest, Tauri v1
[target.'cfg(target_os = "windows")'.dependencies]
windows-registry = "0.6.1"
windows-native-keyring-store = "1.0.0"
[target.'cfg(target_os = "macos")'.dependencies]
apple-native-keyring-store = { version = "1.0.0", features = ["keychain"] }
[target.'cfg(target_os = "linux")'.dependencies]
dbus-secret-service-keyring-store = { version = "1.0.0", features = ["crypto-rust"] }
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
tauri-plugin-global-shortcut = "2"
tauri-plugin-updater = "2.10.0"
[features]
custom-protocol = ["tauri/custom-protocol"]

View File

@ -53,18 +53,6 @@ fn update_cargo_toml(cargo_path: &str, version: &str) {
let cargo_toml_lines = cargo_toml.lines();
let mut new_cargo_toml = String::new();
// Return early when the version already matches to avoid unnecessary rewrites.
let current_version = cargo_toml.lines().find_map(|line| {
let trimmed = line.trim_start();
let rest = trimmed.strip_prefix("\"version\": ")?;
let quoted = rest.strip_prefix('"')?;
let end_idx = quoted.find('"')?;
Some(&quoted[..end_idx])
});
if current_version == Some(version) {
return;
}
for line in cargo_toml_lines {
if line.starts_with("version = ") {
new_cargo_toml.push_str(&format!("version = \"{version}\""));
@ -79,19 +67,6 @@ fn update_cargo_toml(cargo_path: &str, version: &str) {
fn update_tauri_conf(tauri_conf_path: &str, version: &str) {
let tauri_conf = std::fs::read_to_string(tauri_conf_path).unwrap();
// Return early when the version already matches to avoid unnecessary rewrites.
let current_version = tauri_conf.lines().find_map(|line| {
let trimmed = line.trim_start();
let rest = trimmed.strip_prefix("\"version\": ")?;
let quoted = rest.strip_prefix('"')?;
let end_idx = quoted.find('"')?;
Some(&quoted[..end_idx])
});
if current_version == Some(version) {
return;
}
let tauri_conf_lines = tauri_conf.lines();
let mut new_tauri_conf = String::new();
@ -100,7 +75,7 @@ fn update_tauri_conf(tauri_conf_path: &str, version: &str) {
// "version": "0.1.0-alpha.0"
// Please notice, that the version number line might have a leading tab, etc.
if line.contains("\"version\": ") {
new_tauri_conf.push_str(&format!(" \"version\": \"{version}\","));
new_tauri_conf.push_str(&format!("\t\"version\": \"{version}\""));
} else {
new_tauri_conf.push_str(line);
}

View File

@ -1,34 +0,0 @@
{
"$schema": "../gen/schemas/desktop-schema.json",
"identifier": "default",
"description": "Default capability for MindWork AI Studio",
"remote": {
"urls": [
"http://localhost:*"
]
},
"windows": [
"main"
],
"permissions": [
"core:default",
"updater:default",
"opener:default",
"shell:allow-open",
{
"identifier": "shell:allow-spawn",
"allow": [
{
"name": "mindworkAIStudioServer",
"sidecar": true,
"args": true
},
{
"name": "qdrant",
"sidecar": true,
"args": true
}
]
}
]
}

View File

@ -1,23 +1,17 @@
use std::collections::HashMap;
use std::convert::Infallible;
use std::sync::Mutex;
use std::time::Duration;
use async_stream::stream;
use axum::body::Body;
use axum::http::header::CONTENT_TYPE;
use axum::response::{IntoResponse, Response};
use axum::Json;
use bytes::Bytes;
use log::{debug, error, info, trace, warn};
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use rocket::{get, post};
use rocket::response::stream::TextStream;
use rocket::serde::json::Json;
use rocket::serde::Serialize;
use serde::Deserialize;
use strum_macros::Display;
use tauri::{DragDropEvent,RunEvent, Manager, WindowEvent, generate_context};
use tauri::path::PathResolver;
use tauri::WebviewWindow;
use tauri_plugin_updater::{UpdaterExt, Update};
use tauri_plugin_global_shortcut::GlobalShortcutExt;
use tauri_plugin_opener::OpenerExt;
use tauri::updater::UpdateResponse;
use tauri::{FileDropEvent, GlobalShortcutManager, UpdaterEvent, RunEvent, Manager, PathResolver, Window, WindowEvent, generate_context};
use tauri::api::dialog::blocking::FileDialogBuilder;
use tokio::sync::broadcast;
use tokio::time;
use crate::api_token::APIToken;
@ -30,10 +24,10 @@ use crate::qdrant::{cleanup_qdrant, start_qdrant_server, stop_qdrant_server};
use crate::dotnet::create_startup_env_file;
/// The Tauri main window.
pub static MAIN_WINDOW: Lazy<Mutex<Option<WebviewWindow>>> = Lazy::new(|| Mutex::new(None));
static MAIN_WINDOW: Lazy<Mutex<Option<Window>>> = Lazy::new(|| Mutex::new(None));
/// The update response coming from the Tauri updater.
static CHECK_UPDATE_RESPONSE: Lazy<Mutex<Option<Update>>> = Lazy::new(|| Mutex::new(None));
static CHECK_UPDATE_RESPONSE: Lazy<Mutex<Option<UpdateResponse<tauri::Wry>>>> = Lazy::new(|| Mutex::new(None));
/// The event broadcast sender for Tauri events.
static EVENT_BROADCAST: Lazy<Mutex<Option<broadcast::Sender<Event>>>> = Lazy::new(|| Mutex::new(None));
@ -41,9 +35,6 @@ static EVENT_BROADCAST: Lazy<Mutex<Option<broadcast::Sender<Event>>>> = Lazy::ne
/// Stores the currently registered global shortcuts (name -> shortcut string).
static REGISTERED_SHORTCUTS: Lazy<Mutex<HashMap<Shortcut, String>>> = Lazy::new(|| Mutex::new(HashMap::new()));
/// Stores the localhost origin of the Blazor app after the .NET server is ready.
static APPROVED_APP_URL: Lazy<Mutex<Option<tauri::Url>>> = Lazy::new(|| Mutex::new(None));
/// Enum identifying global keyboard shortcuts.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Display)]
#[strum(serialize_all = "SCREAMING_SNAKE_CASE")]
@ -85,34 +76,10 @@ pub fn start_tauri() {
});
let app = tauri::Builder::default()
.plugin(tauri_plugin_dialog::init())
.plugin(tauri_plugin_shell::init())
.plugin(tauri_plugin_opener::init())
.plugin(
tauri::plugin::Builder::<tauri::Wry, ()>::new("external-link-handler")
.on_navigation(|webview, url| {
if !should_open_in_system_browser(webview, url) {
return true;
}
match webview.app_handle().opener().open_url(url.as_str(), None::<&str>) {
Ok(_) => {
info!(Source = "Tauri"; "Opening external URL in system browser: {url}");
},
Err(error) => {
error!(Source = "Tauri"; "Failed to open external URL '{url}' in system browser: {error}");
},
}
false
})
.build(),
)
.plugin(tauri_plugin_global_shortcut::Builder::new().build())
.plugin(tauri_plugin_updater::Builder::new().build())
.setup(move |app| {
// Get the main window:
let window = app.get_webview_window("main").expect("Failed to get main window.");
let window = app.get_window("main").expect("Failed to get main window.");
// Register a callback for window events, such as file drops. We have to use
// this handler in addition to the app event handler, because file drop events
@ -133,27 +100,27 @@ pub fn start_tauri() {
*MAIN_WINDOW.lock().unwrap() = Some(window);
info!(Source = "Bootloader Tauri"; "Setup is running.");
let data_path = app.path().app_local_data_dir().unwrap();
let data_path = app.path_resolver().app_local_data_dir().unwrap();
let data_path = data_path.join("data");
// Get and store the data and config directories:
DATA_DIRECTORY.set(data_path.to_str().unwrap().to_string()).map_err(|_| error!("Was not able to set the data directory.")).unwrap();
CONFIG_DIRECTORY.set(app.path().app_config_dir().unwrap().to_str().unwrap().to_string()).map_err(|_| error!("Was not able to set the config directory.")).unwrap();
CONFIG_DIRECTORY.set(app.path_resolver().app_config_dir().unwrap().to_str().unwrap().to_string()).map_err(|_| error!("Was not able to set the config directory.")).unwrap();
if is_dev() {
#[cfg(debug_assertions)]
create_startup_env_file();
} else {
cleanup_dotnet_server();
start_dotnet_server(app.handle().clone());
start_dotnet_server();
}
cleanup_qdrant();
start_qdrant_server(app.handle().clone());
start_qdrant_server(app.path_resolver());
info!(Source = "Bootloader Tauri"; "Reconfigure the file logger to use the app data directory {data_path:?}");
switch_to_file_logging(data_path).map_err(|e| error!("Failed to switch logging to file: {e}")).unwrap();
set_pdfium_path(app.path());
set_pdfium_path(app.path_resolver());
Ok(())
})
@ -162,7 +129,7 @@ pub fn start_tauri() {
.expect("Error while running Tauri application");
// The app event handler:
app.run(|_app_handle, event| {
app.run(|app_handle, event| {
if !matches!(event, RunEvent::MainEventsCleared) {
debug!(Source = "Tauri"; "Tauri event received: location=app event handler , event={event:?}");
}
@ -182,6 +149,54 @@ pub fn start_tauri() {
}
}
RunEvent::Updater(updater_event) => {
match updater_event {
UpdaterEvent::UpdateAvailable { body, date, version } => {
let body_len = body.len();
info!(Source = "Tauri"; "Updater: update available: body size={body_len} time={date:?} version={version}");
}
UpdaterEvent::Pending => {
info!(Source = "Tauri"; "Updater: update is pending!");
}
UpdaterEvent::DownloadProgress { chunk_length, content_length: _ } => {
trace!(Source = "Tauri"; "Updater: downloading chunk of {chunk_length} bytes");
}
UpdaterEvent::Downloaded => {
info!(Source = "Tauri"; "Updater: update has been downloaded!");
warn!(Source = "Tauri"; "Try to stop the .NET server now...");
if is_prod() {
stop_dotnet_server();
stop_qdrant_server();
} else {
warn!(Source = "Tauri"; "Development environment detected; do not stop the .NET server.");
}
}
UpdaterEvent::Updated => {
info!(Source = "Tauri"; "Updater: app has been updated");
warn!(Source = "Tauri"; "Try to restart the app now...");
if is_prod() {
app_handle.restart();
} else {
warn!(Source = "Tauri"; "Development environment detected; do not restart the app.");
}
}
UpdaterEvent::AlreadyUpToDate => {
info!(Source = "Tauri"; "Updater: app is already up to date");
}
UpdaterEvent::Error(error) => {
warn!(Source = "Tauri"; "Updater: failed to update: {error}");
}
}
}
RunEvent::ExitRequested { .. } => {
warn!(Source = "Tauri"; "Run event: exit was requested.");
stop_qdrant_server();
@ -202,62 +217,13 @@ pub fn start_tauri() {
warn!(Source = "Tauri"; "Tauri app was stopped.");
}
/// Returns true when the given host component refers to the local machine:
/// the literal "localhost", the IPv4 loopback, or the IPv6 loopback (with or
/// without brackets).
fn is_local_host(host: Option<&str>) -> bool {
    match host {
        Some(h) => h == "localhost" || h == "127.0.0.1" || h == "::1" || h == "[::1]",
        None => false,
    }
}
/// Returns true when the host is Tauri's custom-protocol asset host.
fn is_tauri_asset_host(host: Option<&str>) -> bool {
    host == Some("tauri.localhost")
}
/// Returns true for http(s) URLs that point at the Tauri asset host.
fn is_tauri_asset_url(url: &tauri::Url) -> bool {
    let scheme_is_http = url.scheme() == "http" || url.scheme() == "https";
    scheme_is_http && is_tauri_asset_host(url.host_str())
}
/// Returns true for http(s) URLs whose host refers to the local machine.
fn is_local_http_url(url: &tauri::Url) -> bool {
    let scheme_is_http = url.scheme() == "http" || url.scheme() == "https";
    scheme_is_http && is_local_host(url.host_str())
}
/// Returns true when both URLs share the same origin, i.e. the same scheme,
/// host, and effective port (explicit port or the scheme's default).
fn same_origin(left: &tauri::Url, right: &tauri::Url) -> bool {
    if left.scheme() != right.scheme() {
        return false;
    }

    if left.host_str() != right.host_str() {
        return false;
    }

    left.port_or_known_default() == right.port_or_known_default()
}
/// Decides whether a navigation target should leave the embedded webview and
/// be opened in the user's system browser instead.
///
/// Returns true when the URL must be handed to the OS (mailto/tel links and
/// external http(s) origins); returns false for everything the webview keeps
/// handling itself (Tauri asset URLs, the approved app origin, and
/// same-origin navigations).
fn should_open_in_system_browser<R: tauri::Runtime>(webview: &tauri::Webview<R>, url: &tauri::Url) -> bool {
    // Only http(s) needs further checks: mailto/tel always go to the OS,
    // and any other scheme stays inside the webview.
    match url.scheme() {
        "mailto" | "tel" => return true,
        "http" | "https" => {},
        _ => return false,
    }

    // Tauri's own asset host must never be opened externally.
    if is_tauri_asset_url(url) {
        return false;
    }

    // Once the app's localhost origin is known (set after the server is
    // ready), that exact origin stays internal, while any OTHER localhost
    // URL is pushed out to the system browser.
    if let Some(approved_app_url) = APPROVED_APP_URL.lock().unwrap().as_ref() {
        if same_origin(approved_app_url, url) {
            return false;
        }

        if is_local_http_url(url) {
            return true;
        }
    }

    // Navigation within the currently displayed origin stays internal.
    if let Ok(current_url) = webview.url() && same_origin(&current_url, url) {
        return false;
    }

    // Fallback: external hosts open in the system browser; local hosts
    // (before an approved origin exists) remain in the webview.
    !is_local_host(url.host_str())
}
/// Our event API endpoint for Tauri events. We try to send an endless stream of events to the client.
/// If no events are available for a certain time, we send a ping event to keep the connection alive.
/// When the client disconnects, the stream is closed. But we try to not lose events in between.
/// The client is expected to reconnect automatically when the connection is closed and continue
/// listening for events.
pub async fn get_event_stream(_token: APIToken) -> Response {
#[get("/events")]
pub async fn get_event_stream(_token: APIToken) -> TextStream![String] {
// Get the lock to the event broadcast sender:
let event_broadcast_lock = EVENT_BROADCAST.lock().unwrap();
@ -269,7 +235,8 @@ pub async fn get_event_stream(_token: APIToken) -> Response {
// Drop the lock to allow other access to the sender:
drop(event_broadcast_lock);
let stream = stream! {
// Create the event stream:
TextStream! {
loop {
// Wait at most 3 seconds for an event:
match time::timeout(Duration::from_secs(3), event_receiver.recv()).await {
@ -280,11 +247,11 @@ pub async fn get_event_stream(_token: APIToken) -> Response {
// is serialized as a single line so that the client can parse it
// correctly:
let event_json = serde_json::to_string(&event).unwrap();
yield Ok::<Bytes, Infallible>(Bytes::from(event_json));
yield event_json;
// The client expects a newline after each event because we are using
// a method to read the stream line-by-line:
yield Ok::<Bytes, Infallible>(Bytes::from("\n"));
yield "\n".to_string();
},
// Case: we lagged behind and missed some events
@ -304,17 +271,15 @@ pub async fn get_event_stream(_token: APIToken) -> Response {
// Again, we have to serialize the event as a single line:
let event_json = serde_json::to_string(&ping_event).unwrap();
yield Ok::<Bytes, Infallible>(Bytes::from(event_json));
yield event_json;
// The client expects a newline after each event because we are using
// a method to read the stream line-by-line:
yield Ok::<Bytes, Infallible>(Bytes::from("\n"));
yield "\n".to_string();
},
}
}
};
([(CONTENT_TYPE, "application/jsonl")], Body::from_stream(stream)).into_response()
}
}
/// Data structure representing a Tauri event for our event API.
@ -338,21 +303,23 @@ impl Event {
/// Creates an Event instance from a Tauri WindowEvent.
pub fn from_window_event(window_event: &WindowEvent) -> Self {
match window_event {
WindowEvent::DragDrop(drop_event) => {
WindowEvent::FileDrop(drop_event) => {
match drop_event {
DragDropEvent::Enter { paths, .. } => Event::new(
TauriEventType::FileDropHovered,
paths.iter().map(|p| p.display().to_string()).collect(),
FileDropEvent::Hovered(files) => Event::new(TauriEventType::FileDropHovered,
files.iter().map(|f| f.to_string_lossy().to_string()).collect(),
),
DragDropEvent::Drop { paths, .. } => Event::new(
TauriEventType::FileDropDropped,
paths.iter().map(|p| p.display().to_string()).collect(),
FileDropEvent::Dropped(files) => Event::new(TauriEventType::FileDropDropped,
files.iter().map(|f| f.to_string_lossy().to_string()).collect(),
),
DragDropEvent::Leave => Event::new(TauriEventType::FileDropCanceled, Vec::new()),
FileDropEvent::Cancelled => Event::new(TauriEventType::FileDropCanceled,
Vec::new(),
),
_ => Event::new(TauriEventType::Unknown, Vec::new()),
_ => Event::new(TauriEventType::Unknown,
Vec::new(),
),
}
},
@ -413,10 +380,6 @@ pub async fn change_location_to(url: &str) {
}
}
if let Ok(parsed_url) = tauri::Url::parse(url) && is_local_http_url(&parsed_url) {
*APPROVED_APP_URL.lock().unwrap() = Some(parsed_url);
}
let js_location_change = format!("window.location = '{url}';");
let main_window = main_window_spawn_clone.lock().unwrap();
let location_change_result = main_window.as_ref().unwrap().eval(js_location_change.as_str());
@ -427,6 +390,7 @@ pub async fn change_location_to(url: &str) {
}
/// Checks for updates.
#[get("/updates/check")]
pub async fn check_for_update(_token: APIToken) -> Json<CheckUpdateResponse> {
if is_dev() {
warn!(Source = "Updater"; "The app is running in development mode; skipping update check.");
@ -438,67 +402,46 @@ pub async fn check_for_update(_token: APIToken) -> Json<CheckUpdateResponse> {
});
}
let app_handle = {
let main_window = MAIN_WINDOW.lock().unwrap();
match main_window.as_ref() {
Some(window) => window.app_handle().clone(),
None => {
error!(Source = "Updater"; "Cannot check updates: main window not available.");
return Json(CheckUpdateResponse {
let app_handle = MAIN_WINDOW.lock().unwrap().as_ref().unwrap().app_handle();
let response = app_handle.updater().check().await;
match response {
Ok(update_response) => match update_response.is_update_available() {
true => {
*CHECK_UPDATE_RESPONSE.lock().unwrap() = Some(update_response.clone());
let new_version = update_response.latest_version();
info!(Source = "Updater"; "An update to version '{new_version}' is available.");
let changelog = update_response.body();
Json(CheckUpdateResponse {
update_is_available: true,
error: false,
new_version: new_version.to_string(),
changelog: match changelog {
Some(c) => c.to_string(),
None => String::from(""),
},
})
},
false => {
info!(Source = "Updater"; "No updates are available.");
Json(CheckUpdateResponse {
update_is_available: false,
error: true,
error: false,
new_version: String::from(""),
changelog: String::from(""),
});
}
}
};
let response = match app_handle.updater() {
Ok(updater) => updater.check().await,
Err(e) => {
warn!(Source = "Updater"; "Failed to get updater instance: {e}");
return Json(CheckUpdateResponse {
update_is_available: false,
error: true,
new_version: String::from(""),
changelog: String::from(""),
});
}
};
})
},
},
match response {
Ok(Some(update)) => {
let body_len = update.body.as_ref().map_or(0, |body| body.len());
let date = update.date;
let new_version = update.version.clone();
info!(Source = "Tauri"; "Updater: update available: body size={body_len} time={date:?} version={new_version}");
let changelog = update.body.clone().unwrap_or_default();
*CHECK_UPDATE_RESPONSE.lock().unwrap() = Some(update);
Json(CheckUpdateResponse {
update_is_available: true,
error: false,
new_version,
changelog,
})
}
Ok(None) => {
info!(Source = "Tauri"; "Updater: app is already up to date");
Json(CheckUpdateResponse {
update_is_available: false,
error: false,
new_version: String::from(""),
changelog: String::from(""),
})
}
Err(e) => {
warn!(Source = "Tauri"; "Updater: failed to update: {e}");
warn!(Source = "Updater"; "Failed to check for updates: {e}.");
Json(CheckUpdateResponse {
update_is_available: false,
error: true,
new_version: String::from(""),
changelog: String::from(""),
})
}
},
}
}
@ -512,6 +455,7 @@ pub struct CheckUpdateResponse {
}
/// Installs the update.
#[get("/updates/install")]
pub async fn install_update(_token: APIToken) {
if is_dev() {
warn!(Source = "Updater"; "The app is running in development mode; skipping update installation.");
@ -519,51 +463,9 @@ pub async fn install_update(_token: APIToken) {
}
let cloned_response_option = CHECK_UPDATE_RESPONSE.lock().unwrap().clone();
let app_handle = MAIN_WINDOW
.lock()
.unwrap()
.as_ref()
.map(|window| window.app_handle().clone());
match cloned_response_option {
Some(update_response) => {
info!(Source = "Tauri"; "Updater: update is pending!");
let result = update_response.download_and_install(
|chunk_length, _content_length| {
trace!(Source = "Tauri"; "Updater: downloading chunk of {chunk_length} bytes");
},
|| {
info!(Source = "Tauri"; "Updater: update has been downloaded!");
warn!(Source = "Tauri"; "Try to stop the .NET server now...");
if is_prod() {
stop_dotnet_server();
stop_qdrant_server();
} else {
warn!(Source = "Tauri"; "Development environment detected; do not stop the .NET server.");
}
},
).await;
match result {
Ok(_) => {
info!(Source = "Tauri"; "Updater: app has been updated");
warn!(Source = "Tauri"; "Try to restart the app now...");
if is_prod() {
if let Some(handle) = app_handle {
handle.restart();
} else {
warn!(Source = "Tauri"; "Cannot restart after update: main window not available.");
}
} else {
warn!(Source = "Tauri"; "Development environment detected; do not restart the app.");
}
}
Err(e) => {
warn!(Source = "Tauri"; "Updater: failed to update: {e}");
}
}
update_response.download_and_install().await.unwrap();
},
None => {
@ -572,6 +474,269 @@ pub async fn install_update(_token: APIToken) {
}
}
/// Let the user select a directory.
///
/// Opens the native folder picker — starting in the previously used
/// directory when the caller provided one — and reports the choice back. A
/// cancelled dialog yields `user_cancelled: true` with an empty path.
#[post("/select/directory?<title>", data = "<previous_directory>")]
pub fn select_directory(
    _token: APIToken,
    title: &str,
    previous_directory: Option<Json<PreviousDirectory>>,
) -> Json<DirectorySelectionResponse> {
    // Start the picker in the previously used directory when provided:
    let folder_path = match previous_directory {
        Some(previous) => {
            let previous_path = previous.path.as_str();
            create_file_dialog()
                .set_title(title)
                .set_directory(previous_path)
                .pick_folder()
        },

        None => create_file_dialog().set_title(title).pick_folder(),
    };

    match folder_path {
        Some(path) => {
            info!("User selected directory: {path:?}");
            Json(DirectorySelectionResponse {
                user_cancelled: false,
                // Lossy conversion instead of to_str().unwrap() so that a
                // non-UTF-8 path cannot panic the request handler:
                selected_directory: path.to_string_lossy().to_string(),
            })
        },

        None => {
            info!("User cancelled directory selection.");
            Json(DirectorySelectionResponse {
                user_cancelled: true,
                selected_directory: String::from(""),
            })
        },
    }
}
/// Request payload carrying the directory a dialog was last opened in.
#[derive(Clone, Deserialize)]
pub struct PreviousDirectory {
    /// The previously selected directory path.
    path: String,
}
/// A file type filter applied to the file dialogs.
#[derive(Clone, Deserialize)]
pub struct FileTypeFilter {
    /// Display name of the filter.
    filter_name: String,
    /// File extensions the filter accepts.
    filter_extensions: Vec<String>,
}
/// Options for the file-selection endpoints.
#[derive(Clone, Deserialize)]
pub struct SelectFileOptions {
    /// Title of the dialog.
    title: String,
    /// Optional previously used file; its path is used as the dialog's
    /// starting directory.
    previous_file: Option<PreviousFile>,
    /// Optional file type filter.
    filter: Option<FileTypeFilter>,
}
/// Options for the save-file endpoint.
#[derive(Clone, Deserialize)]
pub struct SaveFileOptions {
    /// Title of the dialog.
    title: String,
    /// Optional file reference; its path is used as the dialog's starting
    /// directory.
    name_file: Option<PreviousFile>,
    /// Optional file type filter.
    filter: Option<FileTypeFilter>,
}
/// Response for the directory-selection endpoint.
#[derive(Serialize)]
pub struct DirectorySelectionResponse {
    /// True when the user dismissed the dialog without choosing.
    user_cancelled: bool,
    /// The chosen directory; empty when the dialog was cancelled.
    selected_directory: String,
}
/// Let the user select a file.
///
/// Opens the native file picker configured by the payload (title, optional
/// type filter, optional starting directory) and reports the choice back. A
/// cancelled dialog yields `user_cancelled: true` with an empty path.
#[post("/select/file", data = "<payload>")]
pub fn select_file(
    _token: APIToken,
    payload: Json<SelectFileOptions>,
) -> Json<FileSelectionResponse> {
    // Create a new file dialog builder:
    let file_dialog = create_file_dialog();

    // Set the title of the file dialog:
    let file_dialog = file_dialog.set_title(&payload.title);

    // Set the file type filter if provided:
    let file_dialog = apply_filter(file_dialog, &payload.filter);

    // Set the previous file path if provided:
    let file_dialog = match &payload.previous_file {
        Some(previous) => {
            let previous_path = previous.file_path.as_str();
            file_dialog.set_directory(previous_path)
        },

        None => file_dialog,
    };

    // Show the file dialog and get the selected file path:
    let file_path = file_dialog.pick_file();
    match file_path {
        Some(path) => {
            info!("User selected file: {path:?}");
            Json(FileSelectionResponse {
                user_cancelled: false,
                // Lossy conversion instead of to_str().unwrap() so that a
                // non-UTF-8 path cannot panic the request handler:
                selected_file_path: path.to_string_lossy().to_string(),
            })
        },

        None => {
            info!("User cancelled file selection.");
            Json(FileSelectionResponse {
                user_cancelled: true,
                selected_file_path: String::from(""),
            })
        },
    }
}
/// Let the user select some files.
///
/// Opens the native multi-file picker configured by the payload (title,
/// optional type filter, optional starting directory) and reports the chosen
/// paths back. A cancelled dialog yields `user_cancelled: true` with an
/// empty path list.
#[post("/select/files", data = "<payload>")]
pub fn select_files(
    _token: APIToken,
    payload: Json<SelectFileOptions>,
) -> Json<FilesSelectionResponse> {
    // Create a new file dialog builder:
    let file_dialog = create_file_dialog();

    // Set the title of the file dialog:
    let file_dialog = file_dialog.set_title(&payload.title);

    // Set the file type filter if provided:
    let file_dialog = apply_filter(file_dialog, &payload.filter);

    // Set the previous file path if provided:
    let file_dialog = match &payload.previous_file {
        Some(previous) => {
            let previous_path = previous.file_path.as_str();
            file_dialog.set_directory(previous_path)
        },

        None => file_dialog,
    };

    // Show the file dialog and get the selected file paths:
    let file_paths = file_dialog.pick_files();
    match file_paths {
        Some(paths) => {
            info!("User selected {} files.", paths.len());
            Json(FilesSelectionResponse {
                user_cancelled: false,
                // Lossy conversion instead of to_str().unwrap() so that a
                // non-UTF-8 path cannot panic the request handler:
                selected_file_paths: paths
                    .iter()
                    .map(|p| p.to_string_lossy().to_string())
                    .collect(),
            })
        }

        None => {
            info!("User cancelled file selection.");
            Json(FilesSelectionResponse {
                user_cancelled: true,
                selected_file_paths: Vec::new(),
            })
        },
    }
}
/// Let the user choose a target path for a file-writing operation.
///
/// Opens the native save dialog configured by the payload (title, optional
/// type filter, optional starting directory) and reports the chosen path
/// back. A cancelled dialog yields `user_cancelled: true` with an empty path.
#[post("/save/file", data = "<payload>")]
pub fn save_file(_token: APIToken, payload: Json<SaveFileOptions>) -> Json<FileSaveResponse> {
    // Create a new file dialog builder:
    let file_dialog = create_file_dialog();

    // Set the title of the file dialog:
    let file_dialog = file_dialog.set_title(&payload.title);

    // Set the file type filter if provided:
    let file_dialog = apply_filter(file_dialog, &payload.filter);

    // Set the previous file path if provided:
    let file_dialog = match &payload.name_file {
        Some(previous) => {
            let previous_path = previous.file_path.as_str();
            file_dialog.set_directory(previous_path)
        },

        None => file_dialog,
    };

    // Displays the file dialogue box and select the file:
    let file_path = file_dialog.save_file();
    match file_path {
        Some(path) => {
            info!("User selected file for writing operation: {path:?}");
            Json(FileSaveResponse {
                user_cancelled: false,
                // Lossy conversion instead of to_str().unwrap() so that a
                // non-UTF-8 path cannot panic the request handler:
                save_file_path: path.to_string_lossy().to_string(),
            })
        },

        None => {
            info!("User cancelled file selection.");
            Json(FileSaveResponse {
                user_cancelled: true,
                save_file_path: String::from(""),
            })
        },
    }
}
/// Request payload carrying a previously used file path.
#[derive(Clone, Deserialize)]
pub struct PreviousFile {
    /// The previously selected file path.
    file_path: String,
}
/// Creates a file dialog builder and assigns the main window as parent where supported.
fn create_file_dialog() -> FileDialogBuilder {
    let file_dialog = FileDialogBuilder::new();

    // Parenting a dialog to the main window is only done on Windows and
    // macOS; if the main window is not available yet, the dialog is simply
    // created without a parent.
    #[cfg(any(windows, target_os = "macos"))]
    {
        let main_window_lock = MAIN_WINDOW.lock().unwrap();
        match main_window_lock.as_ref() {
            Some(window) => file_dialog.set_parent(window),
            None => {
                warn!(Source = "Tauri"; "Cannot assign parent window to file dialog: main window not available.");
                file_dialog
            }
        }
    }

    // On all other platforms, return the builder unmodified.
    #[cfg(not(any(windows, target_os = "macos")))]
    {
        file_dialog
    }
}
/// Applies an optional file type filter to a FileDialogBuilder.
fn apply_filter(file_dialog: FileDialogBuilder, filter: &Option<FileTypeFilter>) -> FileDialogBuilder {
    if let Some(f) = filter {
        // The dialog API expects &[&str], so borrow each extension:
        let extensions: Vec<&str> = f.filter_extensions.iter().map(String::as_str).collect();
        file_dialog.add_filter(&f.filter_name, &extensions)
    } else {
        file_dialog
    }
}
/// Response for the single-file selection endpoint.
#[derive(Serialize)]
pub struct FileSelectionResponse {
    /// True when the user dismissed the dialog without choosing.
    user_cancelled: bool,
    /// The chosen file path; empty when the dialog was cancelled.
    selected_file_path: String,
}
/// Response for the multi-file selection endpoint.
#[derive(Serialize)]
pub struct FilesSelectionResponse {
    /// True when the user dismissed the dialog without choosing.
    user_cancelled: bool,
    /// The chosen file paths; empty when the dialog was cancelled.
    selected_file_paths: Vec<String>,
}
/// Response for the save-file endpoint.
#[derive(Serialize)]
pub struct FileSaveResponse {
    /// True when the user dismissed the dialog without choosing.
    user_cancelled: bool,
    /// The chosen target path; empty when the dialog was cancelled.
    save_file_path: String,
}
/// Request payload for registering a global shortcut.
#[derive(Clone, Deserialize)]
pub struct RegisterShortcutRequest {
@ -600,41 +765,47 @@ pub struct AppExitResponse {
/// Internal helper function to register a shortcut with its callback.
/// This is used by both `register_shortcut` and `resume_shortcuts` to
/// avoid code duplication.
fn register_shortcut_with_callback<R: tauri::Runtime>(
app_handle: &tauri::AppHandle<R>,
fn register_shortcut_with_callback(
shortcut_manager: &mut impl GlobalShortcutManager,
shortcut: &str,
shortcut_id: Shortcut,
event_sender: broadcast::Sender<Event>,
) -> Result<(), tauri_plugin_global_shortcut::Error> {
let shortcut_manager = app_handle.global_shortcut();
shortcut_manager.on_shortcut(shortcut, move |_app, _shortcut, _event| {
) -> Result<(), tauri::Error> {
//
// Match the shortcut registration to transform the Tauri result into the Rust result:
//
match shortcut_manager.register(shortcut, move || {
info!(Source = "Tauri"; "Global shortcut triggered for '{}'.", shortcut_id);
let event = Event::new(TauriEventType::GlobalShortcutPressed, vec![shortcut_id.to_string()]);
let sender = event_sender.clone();
tauri::async_runtime::spawn(async move {
if let Err(error) = sender.send(event) {
error!(Source = "Tauri"; "Failed to send global shortcut event: {error}");
match sender.send(event) {
Ok(_) => {}
Err(error) => error!(Source = "Tauri"; "Failed to send global shortcut event: {error}"),
}
});
})
}) {
Ok(_) => Ok(()),
Err(e) => Err(e.into()),
}
}
/// Requests a controlled shutdown of the entire desktop application.
pub async fn exit_app(_token: APIToken) -> Json<AppExitResponse> {
let app_handle = {
let main_window_lock = MAIN_WINDOW.lock().unwrap();
match main_window_lock.as_ref() {
Some(window) => window.app_handle().clone(),
None => {
error!(Source = "Tauri"; "Cannot exit app: main window not available.");
return Json(AppExitResponse {
success: false,
error_message: "Main window not available".to_string(),
});
}
#[post("/app/exit")]
pub fn exit_app(_token: APIToken) -> Json<AppExitResponse> {
let main_window_lock = MAIN_WINDOW.lock().unwrap();
let main_window = match main_window_lock.as_ref() {
Some(window) => window,
None => {
error!(Source = "Tauri"; "Cannot exit app: main window not available.");
return Json(AppExitResponse {
success: false,
error_message: "Main window not available".to_string(),
});
}
};
let app_handle = main_window.app_handle();
info!(Source = "Tauri"; "Controlled app exit was requested by the UI.");
tauri::async_runtime::spawn(async move {
time::sleep(Duration::from_millis(50)).await;
@ -649,7 +820,8 @@ pub async fn exit_app(_token: APIToken) -> Json<AppExitResponse> {
/// Registers or updates a global shortcut. If the shortcut string is empty,
/// the existing shortcut for that name will be unregistered.
pub async fn register_shortcut(_token: APIToken, payload: Json<RegisterShortcutRequest>) -> Json<ShortcutResponse> {
#[post("/shortcuts/register", data = "<payload>")]
pub fn register_shortcut(_token: APIToken, payload: Json<RegisterShortcutRequest>) -> Json<ShortcutResponse> {
let id = payload.id;
let new_shortcut = payload.shortcut.clone();
@ -676,15 +848,16 @@ pub async fn register_shortcut(_token: APIToken, payload: Json<RegisterShortcutR
}
};
let app_handle = main_window.app_handle();
let shortcut_manager = app_handle.global_shortcut();
let mut shortcut_manager = main_window.app_handle().global_shortcut_manager();
let mut registered_shortcuts = REGISTERED_SHORTCUTS.lock().unwrap();
// Unregister the old shortcut if one exists for this name:
if let Some(old_shortcut) = registered_shortcuts.get(&id) && !old_shortcut.is_empty() {
match shortcut_manager.unregister(old_shortcut.as_str()) {
Ok(_) => info!(Source = "Tauri"; "Unregistered old shortcut '{old_shortcut}' for '{}'.", id),
Err(error) => warn!(Source = "Tauri"; "Failed to unregister old shortcut '{old_shortcut}': {error}"),
if let Some(old_shortcut) = registered_shortcuts.get(&id) {
if !old_shortcut.is_empty() {
match shortcut_manager.unregister(old_shortcut.as_str()) {
Ok(_) => info!(Source = "Tauri"; "Unregistered old shortcut '{old_shortcut}' for '{}'.", id),
Err(error) => warn!(Source = "Tauri"; "Failed to unregister old shortcut '{old_shortcut}': {error}"),
}
}
}
@ -714,7 +887,7 @@ pub async fn register_shortcut(_token: APIToken, payload: Json<RegisterShortcutR
drop(event_broadcast_lock);
// Register the new shortcut:
match register_shortcut_with_callback(app_handle, &new_shortcut, id, event_sender) {
match register_shortcut_with_callback(&mut shortcut_manager, &new_shortcut, id, event_sender) {
Ok(_) => {
info!(Source = "Tauri"; "Global shortcut '{new_shortcut}' registered successfully for '{}'.", id);
registered_shortcuts.insert(id, new_shortcut);
@ -754,7 +927,8 @@ pub struct ShortcutValidationResponse {
/// Validates a shortcut string without registering it.
/// Checks if the shortcut syntax is valid and if it
/// conflicts with existing shortcuts.
pub async fn validate_shortcut(_token: APIToken, payload: Json<ValidateShortcutRequest>) -> Json<ShortcutValidationResponse> {
#[post("/shortcuts/validate", data = "<payload>")]
pub fn validate_shortcut(_token: APIToken, payload: Json<ValidateShortcutRequest>) -> Json<ShortcutValidationResponse> {
let shortcut = payload.shortcut.clone();
// Empty shortcuts are always valid (means "disabled"):
@ -808,7 +982,8 @@ pub async fn validate_shortcut(_token: APIToken, payload: Json<ValidateShortcutR
/// The shortcuts remain in our internal map, so they can be re-registered on resume.
/// This is useful when opening a dialog to configure shortcuts, so the user can
/// press the current shortcut to re-enter it without triggering the action.
pub async fn suspend_shortcuts(_token: APIToken) -> Json<ShortcutResponse> {
#[post("/shortcuts/suspend")]
pub fn suspend_shortcuts(_token: APIToken) -> Json<ShortcutResponse> {
// Get the main window to access the global shortcut manager:
let main_window_lock = MAIN_WINDOW.lock().unwrap();
let main_window = match main_window_lock.as_ref() {
@ -822,8 +997,7 @@ pub async fn suspend_shortcuts(_token: APIToken) -> Json<ShortcutResponse> {
}
};
let app_handle = main_window.app_handle();
let shortcut_manager = app_handle.global_shortcut();
let mut shortcut_manager = main_window.app_handle().global_shortcut_manager();
let registered_shortcuts = REGISTERED_SHORTCUTS.lock().unwrap();
// Unregister all shortcuts from the OS (but keep them in our map):
@ -844,7 +1018,8 @@ pub async fn suspend_shortcuts(_token: APIToken) -> Json<ShortcutResponse> {
}
/// Resumes shortcut processing by re-registering all shortcuts with the OS.
pub async fn resume_shortcuts(_token: APIToken) -> Json<ShortcutResponse> {
#[post("/shortcuts/resume")]
pub fn resume_shortcuts(_token: APIToken) -> Json<ShortcutResponse> {
// Get the main window to access the global shortcut manager:
let main_window_lock = MAIN_WINDOW.lock().unwrap();
let main_window = match main_window_lock.as_ref() {
@ -858,7 +1033,7 @@ pub async fn resume_shortcuts(_token: APIToken) -> Json<ShortcutResponse> {
}
};
let app_handle = main_window.app_handle();
let mut shortcut_manager = main_window.app_handle().global_shortcut_manager();
let registered_shortcuts = REGISTERED_SHORTCUTS.lock().unwrap();
// Get the event broadcast sender for the shortcut callbacks:
@ -883,7 +1058,7 @@ pub async fn resume_shortcuts(_token: APIToken) -> Json<ShortcutResponse> {
continue;
}
match register_shortcut_with_callback(app_handle, shortcut, *shortcut_id, event_sender.clone()) {
match register_shortcut_with_callback(&mut shortcut_manager, shortcut, *shortcut_id, event_sender.clone()) {
Ok(_) => {
info!(Source = "Tauri"; "Re-registered shortcut '{shortcut}' for '{}'.", shortcut_id);
success_count += 1;
@ -944,61 +1119,15 @@ fn validate_shortcut_syntax(shortcut: &str) -> bool {
has_key
}
fn set_pdfium_path<R: tauri::Runtime>(path_resolver: &PathResolver<R>) {
let resource_dir = match path_resolver.resource_dir() {
Ok(path) => path,
Err(error) => {
error!(Source = "Bootloader Tauri"; "Failed to resolve resource dir: {error}");
return;
}
};
let candidate_paths = [
resource_dir.join("resources").join("libraries"),
resource_dir.join("libraries"),
];
let pdfium_source_path = candidate_paths
.iter()
.find(|path| path.exists())
.map(|path| path.to_string_lossy().to_string());
match pdfium_source_path {
Some(path) => {
*PDFIUM_LIB_PATH.lock().unwrap() = Some(path);
}
None => {
error!(Source = "Bootloader Tauri"; "Failed to set the PDFium library path.");
}
fn set_pdfium_path(path_resolver: PathResolver) {
let pdfium_relative_source_path = String::from("resources/libraries/");
let pdfium_source_path = path_resolver.resolve_resource(pdfium_relative_source_path);
if pdfium_source_path.is_none() {
error!(Source = "Bootloader Tauri"; "Failed to set the PDFium library path.");
return;
}
let pdfium_source_path = pdfium_source_path.unwrap();
let pdfium_source_path = pdfium_source_path.to_str().unwrap().to_string();
*PDFIUM_LIB_PATH.lock().unwrap() = Some(pdfium_source_path.clone());
}
#[cfg(test)]
mod tests {
    use super::*;

    // The Tauri asset host must be recognized for both http and https.
    #[test]
    fn tauri_localhost_is_tauri_asset_url() {
        let https_url = tauri::Url::parse("https://tauri.localhost/index.html").unwrap();
        let http_url = tauri::Url::parse("http://tauri.localhost/index.html").unwrap();

        assert!(is_tauri_asset_url(&https_url));
        assert!(is_tauri_asset_url(&http_url));
    }

    // A localhost app URL counts as a local http URL, not as an asset URL.
    #[test]
    fn localhost_app_url_is_not_tauri_asset_url() {
        let url = tauri::Url::parse("http://localhost:12345/").unwrap();
        assert!(!is_tauri_asset_url(&url));
        assert!(is_local_http_url(&url));
    }

    // External hosts are neither asset URLs nor local http URLs.
    #[test]
    fn external_url_is_not_internal_url() {
        let url = tauri::Url::parse("https://example.com/").unwrap();
        assert!(!is_tauri_asset_url(&url));
        assert!(!is_local_http_url(&url));
    }
}

View File

@ -1,13 +1,14 @@
use arboard::Clipboard;
use log::{debug, error};
use axum::Json;
use rocket::post;
use rocket::serde::json::Json;
use serde::Serialize;
use crate::api_token::APIToken;
use crate::encryption::{EncryptedText, ENCRYPTION};
/// Sets the clipboard text to the provided encrypted text.
pub async fn set_clipboard(_token: APIToken, encrypted_text: String) -> Json<SetClipboardResponse> {
let encrypted_text = EncryptedText::new(encrypted_text);
#[post("/clipboard/set", data = "<encrypted_text>")]
pub fn set_clipboard(_token: APIToken, encrypted_text: EncryptedText) -> Json<SetClipboardResponse> {
// Decrypt this text first:
let decrypted_text = match ENCRYPTION.decrypt(&encrypted_text) {

View File

@ -5,9 +5,9 @@ use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use log::{error, info, warn};
use once_cell::sync::Lazy;
use rocket::get;
use tauri::api::process::{Command, CommandChild, CommandEvent};
use tauri::Url;
use tauri_plugin_shell::process::{CommandChild, CommandEvent};
use tauri_plugin_shell::ShellExt;
use crate::api_token::APIToken;
use crate::runtime_api_token::API_TOKEN;
use crate::app_window::change_location_to;
@ -88,7 +88,8 @@ fn sanitize_stdout_line(line: &str) -> String {
/// Returns the desired port of the .NET server. Our .NET app calls this endpoint to get
/// the port where the .NET server should listen to.
pub async fn dotnet_port(_token: APIToken) -> String {
#[get("/system/dotnet/port")]
pub fn dotnet_port(_token: APIToken) -> String {
let dotnet_server_port = *DOTNET_SERVER_PORT;
format!("{dotnet_server_port}")
}
@ -129,14 +130,14 @@ pub fn create_startup_env_file() {
}
/// Starts the .NET server in a separate process.
pub fn start_dotnet_server<R: tauri::Runtime>(app_handle: tauri::AppHandle<R>) {
pub fn start_dotnet_server() {
// Get the secret password & salt and convert it to a base64 string:
let secret_password = BASE64_STANDARD.encode(ENCRYPTION.secret_password);
let secret_key_salt = BASE64_STANDARD.encode(ENCRYPTION.secret_key_salt);
let api_port = *API_SERVER_PORT;
let dotnet_server_environment: HashMap<String, String> = HashMap::from_iter([
let dotnet_server_environment = HashMap::from_iter([
(String::from("AI_STUDIO_SECRET_PASSWORD"), secret_password),
(String::from("AI_STUDIO_SECRET_KEY_SALT"), secret_key_salt),
(String::from("AI_STUDIO_CERTIFICATE_FINGERPRINT"), CERTIFICATE_FINGERPRINT.get().unwrap().to_string()),
@ -147,13 +148,11 @@ pub fn start_dotnet_server<R: tauri::Runtime>(app_handle: tauri::AppHandle<R>) {
info!("Try to start the .NET server...");
let server_spawn_clone = DOTNET_SERVER.clone();
tauri::async_runtime::spawn(async move {
let shell = app_handle.shell();
let (mut rx, child) = shell
.sidecar("mindworkAIStudioServer")
.expect("Failed to create sidecar")
.envs(dotnet_server_environment)
.spawn()
.expect("Failed to spawn .NET server process.");
let (mut rx, child) = Command::new_sidecar("mindworkAIStudioServer")
.expect("Failed to create sidecar")
.envs(dotnet_server_environment)
.spawn()
.expect("Failed to spawn .NET server process.");
let server_pid = child.pid();
info!(Source = "Bootloader .NET"; "The .NET server process started with PID={server_pid}.");
log_potential_stale_process(Path::new(DATA_DIRECTORY.get().unwrap()).join(PID_FILE_NAME), server_pid, SIDECAR_TYPE);
@ -164,19 +163,17 @@ pub fn start_dotnet_server<R: tauri::Runtime>(app_handle: tauri::AppHandle<R>) {
// Log the output of the .NET server:
// NOTE: Log events are sent via structured HTTP API calls.
// This loop serves for fundamental output (e.g., startup errors).
while let Some(event) = rx.recv().await {
if let CommandEvent::Stdout(line) = event {
let line_utf8 = String::from_utf8_lossy(&line).to_string();
let line = sanitize_stdout_line(line_utf8.trim_end());
if !line.trim().is_empty() {
info!(Source = ".NET Server (stdout)"; "{line}");
}
while let Some(CommandEvent::Stdout(line)) = rx.recv().await {
let line = sanitize_stdout_line(line.trim_end());
if !line.trim().is_empty() {
info!(Source = ".NET Server (stdout)"; "{line}");
}
}
});
}
/// This endpoint is called by the .NET server to signal that the server is ready.
#[get("/system/dotnet/ready")]
pub async fn dotnet_ready(_token: APIToken) {
// We create a manual scope for the lock to be released as soon as possible.

View File

@ -2,20 +2,26 @@ use std::fmt;
use std::time::Instant;
use base64::Engine;
use base64::prelude::BASE64_STANDARD;
use aes::cipher::{block_padding::Pkcs7, BlockModeDecrypt, BlockModeEncrypt, KeyIvInit};
use aes::cipher::{block_padding::Pkcs7, BlockDecryptMut, BlockEncryptMut, KeyIvInit};
use hmac::Hmac;
use log::{error, info};
use once_cell::sync::Lazy;
use pbkdf2::pbkdf2;
use rand::rngs::SysRng;
use rand::{Rng, SeedableRng};
use serde::{Deserialize, Serialize};
use rocket::{data, Data, Request};
use rocket::data::ToByteUnit;
use rocket::http::Status;
use rocket::serde::{Deserialize, Serialize};
use sha2::Sha512;
use tokio::io::AsyncReadExt;
type Aes256CbcEnc = cbc::Encryptor<aes::Aes256>;
type Aes256CbcDec = cbc::Decryptor<aes::Aes256>;
type DataOutcome<'r, T> = data::Outcome<'r, T>;
/// The encryption instance used for the IPC channel.
pub static ENCRYPTION: Lazy<Encryption> = Lazy::new(|| {
//
@ -107,7 +113,7 @@ impl Encryption {
let mut buffer = vec![0u8; data.len() + 16];
buffer[..data.len()].copy_from_slice(data);
let encrypted = cipher
.encrypt_padded::<Pkcs7>(&mut buffer, data.len())
.encrypt_padded_mut::<Pkcs7>(&mut buffer, data.len())
.map_err(|e| format!("Error encrypting data: {e}"))?;
let mut result = BASE64_STANDARD.encode(self.secret_key_salt);
result.push_str(&BASE64_STANDARD.encode(encrypted));
@ -130,7 +136,7 @@ impl Encryption {
let cipher = Aes256CbcDec::new(&self.key.into(), &self.iv.into());
let mut buffer = encrypted.to_vec();
let decrypted = cipher
.decrypt_padded::<Pkcs7>(&mut buffer)
.decrypt_padded_mut::<Pkcs7>(&mut buffer)
.map_err(|e| format!("Error decrypting data: {e}"))?;
String::from_utf8(decrypted.to_vec()).map_err(|e| format!("Error converting decrypted data to string: {}", e))
@ -164,4 +170,27 @@ impl fmt::Display for EncryptedText {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "**********")
}
}
/// Use Case: When we receive encrypted text from the client as body (e.g., in a POST request).
/// We must interpret the body as EncryptedText.
#[rocket::async_trait]
impl<'r> data::FromData<'r> for EncryptedText {
// Errors are surfaced to Rocket as human-readable strings.
type Error = String;
/// Parses the data as EncryptedText.
///
/// Non-text bodies (or requests without a content type) are forwarded so
/// another handler can claim them; text bodies are read with a 2 MiB cap.
async fn from_data(req: &'r Request<'_>, data: Data<'r>) -> DataOutcome<'r, Self> {
// A missing content type also triggers the forward (map_or default = true).
let content_type = req.content_type();
if content_type.map_or(true, |ct| !ct.is_text()) {
return DataOutcome::Forward((data, Status::Ok));
}
// Cap the body at 2 MiB to avoid unbounded memory use on large uploads.
let mut stream = data.open(2.mebibytes());
let mut body = String::new();
if let Err(e) = stream.read_to_string(&mut body).await {
return DataOutcome::Error((Status::InternalServerError, format!("Failed to read data: {}", e)));
}
// The raw body text is wrapped as-is, without further validation here.
DataOutcome::Success(EncryptedText(body))
}
}

View File

@ -1,6 +1,7 @@
use crate::api_token::APIToken;
use axum::Json;
use log::{debug, info, warn};
use rocket::get;
use rocket::serde::json::Json;
use serde::Serialize;
use std::collections::{HashMap, HashSet};
use std::env;
@ -28,7 +29,8 @@ pub static CONFIG_DIRECTORY: OnceLock<String> = OnceLock::new();
static USER_LANGUAGE: OnceLock<String> = OnceLock::new();
/// Returns the config directory.
pub async fn get_config_directory(_token: APIToken) -> String {
#[get("/system/directories/config")]
pub fn get_config_directory(_token: APIToken) -> String {
match CONFIG_DIRECTORY.get() {
Some(config_directory) => config_directory.clone(),
None => String::from(""),
@ -36,7 +38,8 @@ pub async fn get_config_directory(_token: APIToken) -> String {
}
/// Returns the data directory.
pub async fn get_data_directory(_token: APIToken) -> String {
#[get("/system/directories/data")]
pub fn get_data_directory(_token: APIToken) -> String {
match DATA_DIRECTORY.get() {
Some(data_directory) => data_directory.clone(),
None => String::from(""),
@ -87,8 +90,10 @@ fn normalize_locale_tag(locale: &str) -> Option<String> {
return None;
}
if let Some(region) = segments.next() && region.len() == 2 && region.chars().all(|c| c.is_ascii_alphabetic()) {
return Some(format!("{}-{}", language, region.to_ascii_uppercase()));
if let Some(region) = segments.next() {
if region.len() == 2 && region.chars().all(|c| c.is_ascii_alphabetic()) {
return Some(format!("{}-{}", language, region.to_ascii_uppercase()));
}
}
Some(language)
@ -145,7 +150,8 @@ fn detect_user_language() -> (String, LanguageDetectionSource) {
)
}
pub async fn read_user_language(_token: APIToken) -> String {
#[get("/system/language")]
pub fn read_user_language(_token: APIToken) -> String {
USER_LANGUAGE
.get_or_init(|| {
let (user_language, source) = detect_user_language();
@ -188,7 +194,8 @@ struct EnterpriseSourceData {
encryption_secret: String,
}
pub async fn read_enterprise_env_config_id(_token: APIToken) -> String {
#[get("/system/enterprise/config/id")]
pub fn read_enterprise_env_config_id(_token: APIToken) -> String {
debug!("Trying to read the effective enterprise configuration ID.");
resolve_effective_enterprise_config_source()
.configs
@ -198,7 +205,8 @@ pub async fn read_enterprise_env_config_id(_token: APIToken) -> String {
.unwrap_or_default()
}
pub async fn read_enterprise_env_config_server_url(_token: APIToken) -> String {
#[get("/system/enterprise/config/server")]
pub fn read_enterprise_env_config_server_url(_token: APIToken) -> String {
debug!("Trying to read the effective enterprise configuration server URL.");
resolve_effective_enterprise_config_source()
.configs
@ -208,13 +216,15 @@ pub async fn read_enterprise_env_config_server_url(_token: APIToken) -> String {
.unwrap_or_default()
}
pub async fn read_enterprise_env_config_encryption_secret(_token: APIToken) -> String {
#[get("/system/enterprise/config/encryption_secret")]
pub fn read_enterprise_env_config_encryption_secret(_token: APIToken) -> String {
debug!("Trying to read the effective enterprise configuration encryption secret.");
resolve_effective_enterprise_secret_source().encryption_secret
}
/// Returns all enterprise configurations from the effective source.
pub async fn read_enterprise_configs(_token: APIToken) -> Json<Vec<EnterpriseConfig>> {
#[get("/system/enterprise/configs")]
pub fn read_enterprise_configs(_token: APIToken) -> Json<Vec<EnterpriseConfig>> {
info!("Trying to read the effective enterprise configurations.");
Json(resolve_effective_enterprise_config_source().configs)
}
@ -416,9 +426,10 @@ fn load_policy_values_from_directories(directories: &[PathBuf]) -> HashMap<Strin
}
let secret_path = directory.join(ENTERPRISE_POLICY_SECRET_FILE_NAME);
if let Some(secret_values) = read_policy_yaml_mapping(&secret_path)
&& let Some(secret) = secret_values.get("config_encryption_secret") {
insert_first_non_empty_value(&mut values, "config_encryption_secret", secret);
if let Some(secret_values) = read_policy_yaml_mapping(&secret_path) {
if let Some(secret) = secret_values.get("config_encryption_secret") {
insert_first_non_empty_value(&mut values, "config_encryption_secret", secret);
}
}
}

View File

@ -1,299 +0,0 @@
use log::{error, info};
use axum::extract::Query;
use axum::Json;
use serde::{Deserialize, Serialize};
use tauri_plugin_dialog::{DialogExt, FileDialogBuilder};
use crate::api_token::APIToken;
use crate::app_window::MAIN_WINDOW;
/// Request payload: the directory a previous dialog run ended in, used to
/// reopen the dialog at the same location.
#[derive(Clone, Deserialize)]
pub struct PreviousDirectory {
path: String,
}
/// Query parameters for the directory-selection endpoint.
#[derive(Deserialize)]
pub struct SelectDirectoryQuery {
title: String,
}
/// A named file-extension filter applied to file dialogs
/// (e.g., name "Images", extensions ["png", "jpg"]).
#[derive(Clone, Deserialize)]
pub struct FileTypeFilter {
filter_name: String,
filter_extensions: Vec<String>,
}
/// Options for opening a file-selection dialog.
#[derive(Clone, Deserialize)]
pub struct SelectFileOptions {
title: String,
// Optional previously selected file; its path seeds the start directory.
previous_file: Option<PreviousFile>,
// Optional extension filter; no filter means all files are shown.
filter: Option<FileTypeFilter>,
}
/// Options for opening a save-file dialog.
#[derive(Clone, Deserialize)]
pub struct SaveFileOptions {
title: String,
// Optional file whose path seeds the start directory of the dialog.
name_file: Option<PreviousFile>,
filter: Option<FileTypeFilter>,
}
/// Result of a directory-selection dialog; `selected_directory` is empty
/// when the user cancelled.
#[derive(Serialize)]
pub struct DirectorySelectionResponse {
user_cancelled: bool,
selected_directory: String,
}
/// Result of a single-file-selection dialog; path is empty on cancel.
#[derive(Serialize)]
pub struct FileSelectionResponse {
user_cancelled: bool,
selected_file_path: String,
}
/// Result of a multi-file-selection dialog; list is empty on cancel.
#[derive(Serialize)]
pub struct FilesSelectionResponse {
user_cancelled: bool,
selected_file_paths: Vec<String>,
}
/// Result of a save-file dialog; path is empty on cancel.
#[derive(Serialize)]
pub struct FileSaveResponse {
user_cancelled: bool,
save_file_path: String,
}
/// Request payload: a previously used file path.
#[derive(Clone, Deserialize)]
pub struct PreviousFile {
file_path: String,
}
/// Let the user select a directory.
///
/// Opens a native folder-picker parented to the main window. Returns
/// `user_cancelled: true` with an empty path when the main window is
/// unavailable, the user cancels, or the chosen path cannot be converted.
pub async fn select_directory(
_token: APIToken,
Query(query): Query<SelectDirectoryQuery>,
previous_directory: Option<Json<PreviousDirectory>>,
) -> Json<DirectorySelectionResponse> {
// Hold the main-window mutex only while building the dialog.
let main_window_lock = MAIN_WINDOW.lock().unwrap();
let main_window = match main_window_lock.as_ref() {
Some(window) => window,
None => {
error!(Source = "Tauri"; "Cannot open directory dialog: main window not available.");
return Json(DirectorySelectionResponse {
user_cancelled: true,
selected_directory: String::from(""),
});
}
};
let mut dialog = main_window.dialog().file().set_parent(main_window).set_title(&query.title);
// Reopen the dialog at the caller-provided previous location, if any.
if let Some(previous) = previous_directory {
dialog = dialog.set_directory(previous.path.clone());
}
// Release the mutex explicitly BEFORE the blocking modal dialog below,
// so other handlers can access the main window in the meantime.
drop(main_window_lock);
let folder_path = dialog.blocking_pick_folder();
match folder_path {
Some(path) => {
// FilePath -> PathBuf conversion can fail (e.g., non-path URIs).
match path.into_path() {
Ok(pb) => {
info!("User selected directory: {pb:?}");
Json(DirectorySelectionResponse {
user_cancelled: false,
selected_directory: pb.to_string_lossy().to_string(),
})
}
Err(e) => {
// Conversion failure is reported to the client as a cancel.
error!(Source = "Tauri"; "Failed to convert directory path: {e}");
Json(DirectorySelectionResponse {
user_cancelled: true,
selected_directory: String::new(),
})
}
}
},
None => {
info!("User cancelled directory selection.");
Json(DirectorySelectionResponse {
user_cancelled: true,
selected_directory: String::from(""),
})
},
}
}
/// Let the user select a file.
///
/// Opens a native single-file picker parented to the main window, with an
/// optional extension filter and start directory. Returns
/// `user_cancelled: true` with an empty path when the main window is
/// unavailable, the user cancels, or the path cannot be converted.
pub async fn select_file(
_token: APIToken,
payload: Json<SelectFileOptions>,
) -> Json<FileSelectionResponse> {
// Create a new file dialog builder:
// NOTE: the mutex guard is a temporary that is dropped at the end of this
// statement, so the lock is NOT held during the blocking dialog below.
let file_dialog = MAIN_WINDOW
.lock()
.unwrap()
.as_ref()
.map(|w| w.dialog().file().set_parent(w).set_title(&payload.title));
let Some(mut file_dialog) = file_dialog else {
error!(Source = "Tauri"; "Cannot open file dialog: main window not available.");
return Json(FileSelectionResponse {
user_cancelled: true,
selected_file_path: String::from(""),
});
};
// Set the file type filter if provided:
file_dialog = apply_filter(file_dialog, &payload.filter);
// Set the previous file path if provided:
// NOTE(review): the previous FILE path is passed to set_directory here —
// presumably the dialog library tolerates that; confirm against its docs.
if let Some(previous) = &payload.previous_file {
let previous_path = previous.file_path.as_str();
file_dialog = file_dialog.set_directory(previous_path);
}
// Show the file dialog and get the selected file path:
let file_path = file_dialog.blocking_pick_file();
match file_path {
Some(path) => match path.into_path() {
Ok(pb) => {
info!("User selected file: {pb:?}");
Json(FileSelectionResponse {
user_cancelled: false,
selected_file_path: pb.to_string_lossy().to_string(),
})
}
Err(e) => {
// Conversion failure is reported to the client as a cancel.
error!(Source = "Tauri"; "Failed to convert file path: {e}");
Json(FileSelectionResponse {
user_cancelled: true,
selected_file_path: String::new(),
})
}
},
None => {
info!("User cancelled file selection.");
Json(FileSelectionResponse {
user_cancelled: true,
selected_file_path: String::from(""),
})
},
}
}
/// Let the user select some files.
///
/// Opens a native multi-file picker parented to the main window, with an
/// optional extension filter and start directory. Returns
/// `user_cancelled: true` with an empty list when the main window is
/// unavailable or the user cancels.
pub async fn select_files(
_token: APIToken,
payload: Json<SelectFileOptions>,
) -> Json<FilesSelectionResponse> {
// Create a new file dialog builder:
// NOTE: the mutex guard is a temporary dropped at the end of this
// statement, so the lock is NOT held during the blocking dialog below.
let file_dialog = MAIN_WINDOW
.lock()
.unwrap()
.as_ref()
.map(|w| w.dialog().file().set_parent(w).set_title(&payload.title));
let Some(mut file_dialog) = file_dialog else {
error!(Source = "Tauri"; "Cannot open file dialog: main window not available.");
return Json(FilesSelectionResponse {
user_cancelled: true,
selected_file_paths: Vec::new(),
});
};
// Set the file type filter if provided:
file_dialog = apply_filter(file_dialog, &payload.filter);
// Set the previous file path if provided:
if let Some(previous) = &payload.previous_file {
let previous_path = previous.file_path.as_str();
file_dialog = file_dialog.set_directory(previous_path);
}
// Show the file dialog and get the selected file path:
let file_paths = file_dialog.blocking_pick_files();
match file_paths {
Some(paths) => {
// Paths that fail FilePath -> PathBuf conversion are silently
// dropped from the result (filter_map over `ok()`).
let converted: Vec<String> = paths.into_iter().filter_map(|p| p.into_path().ok()).map(|pb| pb.to_string_lossy().to_string()).collect();
info!("User selected {} files.", converted.len());
Json(FilesSelectionResponse {
user_cancelled: false,
selected_file_paths: converted,
})
}
None => {
info!("User cancelled file selection.");
Json(FilesSelectionResponse {
user_cancelled: true,
selected_file_paths: Vec::new(),
})
},
}
}
/// Let the user pick a destination path via a native save-file dialog.
///
/// The dialog is parented to the main window; an optional extension filter
/// and start directory can be supplied. Returns `user_cancelled: true` with
/// an empty path when the main window is unavailable, the user cancels, or
/// the chosen path cannot be converted. The file itself is NOT written here.
pub async fn save_file(_token: APIToken, payload: Json<SaveFileOptions>) -> Json<FileSaveResponse> {
// Create a new file dialog builder:
// NOTE: the mutex guard is a temporary dropped at the end of this
// statement, so the lock is NOT held during the blocking dialog below.
let file_dialog = MAIN_WINDOW
.lock()
.unwrap()
.as_ref()
.map(|w| w.dialog().file().set_parent(w).set_title(&payload.title));
let Some(mut file_dialog) = file_dialog else {
error!(Source = "Tauri"; "Cannot open save dialog: main window not available.");
return Json(FileSaveResponse {
user_cancelled: true,
save_file_path: String::from(""),
});
};
// Set the file type filter if provided:
file_dialog = apply_filter(file_dialog, &payload.filter);
// Set the previous file path if provided:
if let Some(previous) = &payload.name_file {
let previous_path = previous.file_path.as_str();
file_dialog = file_dialog.set_directory(previous_path);
}
// Displays the file dialogue box and select the file:
let file_path = file_dialog.blocking_save_file();
match file_path {
Some(path) => match path.into_path() {
Ok(pb) => {
info!("User selected file for writing operation: {pb:?}");
Json(FileSaveResponse {
user_cancelled: false,
save_file_path: pb.to_string_lossy().to_string(),
})
}
Err(e) => {
// Conversion failure is reported to the client as a cancel.
error!(Source = "Tauri"; "Failed to convert save file path: {e}");
Json(FileSaveResponse {
user_cancelled: true,
save_file_path: String::new(),
})
}
},
None => {
info!("User cancelled file selection.");
Json(FileSaveResponse {
user_cancelled: true,
save_file_path: String::from(""),
})
},
}
}
/// Applies an optional file type filter to a FileDialogBuilder.
///
/// When `filter` is `None`, the builder is returned unchanged; otherwise a
/// single named extension filter is added to it.
fn apply_filter<R: tauri::Runtime>(file_dialog: FileDialogBuilder<R>, filter: &Option<FileTypeFilter>) -> FileDialogBuilder<R> {
    if let Some(file_filter) = filter {
        // add_filter expects a slice of &str, so borrow each extension first.
        let extensions: Vec<&str> = file_filter.filter_extensions.iter().map(String::as_str).collect();
        file_dialog.add_filter(&file_filter.filter_name, &extensions)
    } else {
        file_dialog
    }
}

View File

@ -1,24 +1,22 @@
use std::cmp::min;
use std::convert::Infallible;
use crate::api_token::APIToken;
use crate::pandoc::PandocProcessBuilder;
use crate::pdfium::PdfiumInit;
use async_stream::stream;
use axum::extract::Query;
use axum::extract::rejection::QueryRejection;
use axum::response::sse::{Event, Sse};
use base64::{engine::general_purpose, Engine as _};
use calamine::{open_workbook_auto, Reader};
use file_format::{FileFormat, Kind};
use futures::{Stream, StreamExt};
use pdfium_render::prelude::Pdfium;
use pptx_to_md::{ImageHandlingMode, ParserConfig, PptxContainer};
use serde::{Deserialize, Deserializer, Serialize};
use serde::de::{Error as SerdeError, Visitor};
use rocket::get;
use rocket::response::stream::{Event, EventStream};
use rocket::serde::Serialize;
use rocket::tokio::select;
use rocket::Shutdown;
use std::path::Path;
use std::pin::Pin;
use std::fmt;
use log::{debug, error, warn};
use log::{debug, error};
use tokio::io::AsyncBufReadExt;
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
@ -84,95 +82,39 @@ const IMAGE_SEGMENT_SIZE_IN_CHARS: usize = 8_192; // equivalent to ~ 5500 token
type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>;
type ChunkStream = Pin<Box<dyn Stream<Item = Result<Chunk>> + Send>>;
/// Query parameters for the data-extraction endpoint.
#[derive(Deserialize)]
pub struct ExtractDataQuery {
// Filesystem path of the document to extract.
path: String,
// Caller-chosen ID echoed back on every emitted chunk.
stream_id: String,
// Accepts "true"/"false"/"1"/"0" in any case via the custom deserializer.
#[serde(deserialize_with = "deserialize_bool_case_insensitive")]
extract_images: bool,
}
/// Deserializes a boolean from either a native bool or the textual forms
/// "true"/"false"/"1"/"0" in any ASCII case. Any other input is rejected
/// with an `invalid_value` error.
fn deserialize_bool_case_insensitive<'de, D>(deserializer: D) -> std::result::Result<bool, D::Error>
where
    D: Deserializer<'de>,
{
    // Visitor that tolerates textual booleans as produced by query strings.
    struct CaseInsensitiveBoolVisitor;

    impl<'de> Visitor<'de> for CaseInsensitiveBoolVisitor {
        type Value = bool;

        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str("a boolean value")
        }

        // Native booleans pass straight through.
        fn visit_bool<E>(self, value: bool) -> std::result::Result<Self::Value, E> {
            Ok(value)
        }

        // Textual booleans are normalized to lowercase before matching.
        fn visit_str<E>(self, value: &str) -> std::result::Result<Self::Value, E>
        where
            E: SerdeError,
        {
            let normalized = value.to_ascii_lowercase();
            if normalized == "true" || normalized == "1" {
                Ok(true)
            } else if normalized == "false" || normalized == "0" {
                Ok(false)
            } else {
                Err(E::invalid_value(serde::de::Unexpected::Str(value), &self))
            }
        }
    }

    // deserialize_any lets self-describing formats pick bool vs string input.
    deserializer.deserialize_any(CaseInsensitiveBoolVisitor)
}
pub async fn extract_data(
_token: APIToken,
query: std::result::Result<Query<ExtractDataQuery>, QueryRejection>,
) -> Sse<impl Stream<Item = std::result::Result<Event, Infallible>>> {
let query = match query {
Ok(Query(query)) => Ok(query),
Err(e) => {
let message = format!("Invalid query for '/retrieval/fs/extract': {e}");
warn!("{message}");
Err(message)
},
};
let stream = stream! {
match query {
Ok(query) => {
let stream_result = stream_data(&query.path, query.extract_images).await;
let id_ref = &query.stream_id;
match stream_result {
Ok(mut stream) => {
while let Some(chunk) = stream.next().await {
match chunk {
Ok(mut chunk) => {
chunk.set_stream_id(id_ref);
yield Ok(Event::default().json_data(&chunk).unwrap_or_else(|e| Event::default().data(format!("Error: {e}"))));
},
Err(e) => {
yield Ok(Event::default().json_data(format!("Error: {e}")).unwrap_or_else(|_| Event::default().data(format!("Error: {e}"))));
break;
},
}
}
},
Err(e) => {
yield Ok(Event::default().json_data(format!("Error starting stream: {e}")).unwrap_or_else(|_| Event::default().data(format!("Error starting stream: {e}"))));
}
};
#[get("/retrieval/fs/extract?<path>&<stream_id>&<extract_images>")]
pub async fn extract_data(_token: APIToken, path: String, stream_id: String, extract_images: bool, mut end: Shutdown) -> EventStream![] {
EventStream! {
let stream_result = stream_data(&path, extract_images).await;
let id_ref = &stream_id;
match stream_result {
Ok(mut stream) => {
loop {
let chunk = select! {
chunk = stream.next() => match chunk {
Some(Ok(mut chunk)) => {
chunk.set_stream_id(id_ref);
chunk
},
Some(Err(e)) => {
yield Event::json(&format!("Error: {e}"));
break;
},
None => break,
},
_ = &mut end => break,
};
yield Event::json(&chunk);
}
},
Err(e) => {
yield Ok(Event::default().json_data(format!("Error starting stream: {e}")).unwrap_or_else(|_| Event::default().data(format!("Error starting stream: {e}"))));
},
yield Event::json(&format!("Error starting stream: {e}"));
}
}
};
Sse::new(stream)
}
}
async fn stream_data(file_path: &str, extract_images: bool) -> Result<ChunkStream> {

View File

@ -17,5 +17,4 @@ pub mod qdrant;
pub mod certificate_factory;
pub mod runtime_api_token;
pub mod stale_process_cleanup;
mod sidecar_types;
mod file_actions;
mod sidecar_types;

View File

@ -8,8 +8,9 @@ use flexi_logger::{DeferredNow, Duplicate, FileSpec, Logger, LoggerHandle};
use flexi_logger::writers::FileLogWriter;
use log::{kv, Level};
use log::kv::{Key, Value, VisitSource};
use axum::Json;
use serde::{Deserialize, Serialize};
use rocket::{get, post};
use rocket::serde::json::Json;
use rocket::serde::{Deserialize, Serialize};
use crate::api_token::APIToken;
use crate::environment::is_dev;
@ -33,17 +34,14 @@ pub fn init_logging() {
false => log_config.push_str("info, "),
};
// Keep noisy HTTP/TLS internals at info level even in development builds:
log_config.push_str("h2=info, ");
log_config.push_str("hyper=info, ");
log_config.push_str("hyper_util=info, ");
log_config.push_str("axum=info, ");
log_config.push_str("axum_server=info, ");
log_config.push_str("tower=info, ");
log_config.push_str("tower_http=info, ");
log_config.push_str("rustls=info, ");
log_config.push_str("tokio_rustls=info, ");
log_config.push_str("reqwest=info");
// Set the log level for the Rocket library:
log_config.push_str("rocket=info, ");
// Set the log level for the Rocket server:
log_config.push_str("rocket::server=warn, ");
// Set the log level for the Reqwest library:
log_config.push_str("reqwest::async_impl::client=info");
// Configure the initial filename. On Unix systems, the file should start
// with a dot to be hidden.
@ -226,6 +224,7 @@ fn file_logger_format(
write!(w, "{}", &record.args())
}
#[get("/log/paths")]
pub async fn get_log_paths(_token: APIToken) -> Json<LogPathsResponse> {
Json(LogPathsResponse {
log_startup_path: LOG_STARTUP_PATH.get().expect("No startup log path was set").clone(),
@ -270,7 +269,9 @@ fn log_with_level(
}
/// Logs an event from the .NET server.
pub async fn log_event(_token: APIToken, Json(event): Json<LogEvent>) -> Json<LogEventResponse> {
#[post("/log/event", data = "<event>")]
pub fn log_event(_token: APIToken, event: Json<LogEvent>) -> Json<LogEventResponse> {
let event = event.into_inner();
let level = parse_dotnet_log_level(&event.level);
let message = event.message.as_str();
let category = event.category.as_str();

View File

@ -1,6 +1,7 @@
// Prevents an additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
extern crate rocket;
extern crate core;
use log::{info, warn};
@ -10,7 +11,7 @@ use mindwork_ai_studio::environment::is_dev;
use mindwork_ai_studio::log::init_logging;
use mindwork_ai_studio::metadata::MetaData;
use mindwork_ai_studio::runtime_api::start_runtime_api;
use mindwork_ai_studio::secret::init_secret_store;
#[tokio::main]
async fn main() {
@ -42,7 +43,6 @@ async fn main() {
info!("Running in production mode.");
}
init_secret_store();
generate_runtime_certificate();
start_runtime_api();

View File

@ -1,16 +1,13 @@
use std::collections::HashSet;
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
use std::fs;
use std::sync::OnceLock;
use log::{info, warn};
use log::warn;
use tokio::process::Command;
use crate::environment::DATA_DIRECTORY;
use crate::metadata::META_DATA;
/// Tracks whether the RID mismatch warning has been logged.
static HAS_LOGGED_RID_MISMATCH: OnceLock<()> = OnceLock::new();
static HAS_LOGGED_PANDOC_PATH: OnceLock<()> = OnceLock::new();
pub struct PandocExecutable {
pub executable: String,
@ -117,42 +114,28 @@ impl PandocProcessBuilder {
// Any local installation should be preferred over the system-wide installation.
let data_folder = PathBuf::from(DATA_DIRECTORY.get().unwrap());
let local_installation_root_directory = data_folder.join("pandoc");
let executable_name = Self::pandoc_executable_name();
if local_installation_root_directory.exists()
&& let Ok(pandoc_path) = Self::find_executable_in_dir(&local_installation_root_directory, &executable_name) {
HAS_LOGGED_PANDOC_PATH.get_or_init(|| {
info!(Source = "PandocProcessBuilder"; "Found local Pandoc installation at: '{}'.", pandoc_path.to_string_lossy()
);
});
if local_installation_root_directory.exists() {
let executable_name = Self::pandoc_executable_name();
return PandocExecutable {
executable: pandoc_path.to_string_lossy().to_string(),
is_local_installation: true,
};
}
for candidate in Self::system_pandoc_executable_candidates(&executable_name) {
if candidate.exists() && candidate.is_file() {
HAS_LOGGED_PANDOC_PATH.get_or_init(|| {
info!(Source = "PandocProcessBuilder"; "Found system Pandoc installation at: '{}'.", candidate.to_string_lossy()
);
});
return PandocExecutable {
executable: candidate.to_string_lossy().to_string(),
is_local_installation: false,
};
if let Ok(entries) = fs::read_dir(&local_installation_root_directory) {
for entry in entries.flatten() {
let path = entry.path();
if path.is_dir() {
if let Ok(pandoc_path) = Self::find_executable_in_dir(&path, &executable_name) {
return PandocExecutable {
executable: pandoc_path.to_string_lossy().to_string(),
is_local_installation: true,
};
}
}
}
}
}
// When no local installation was found, we assume that the pandoc executable is in the system PATH:
HAS_LOGGED_PANDOC_PATH.get_or_init(|| {
warn!(Source = "PandocProcessBuilder"; "Falling back to system PATH for the Pandoc executable: '{}'.", executable_name);
});
PandocExecutable {
executable: executable_name,
executable: Self::pandoc_executable_name(),
is_local_installation: false,
}
}
@ -167,8 +150,10 @@ impl PandocProcessBuilder {
if let Ok(entries) = fs::read_dir(dir) {
for entry in entries.flatten() {
let path = entry.path();
if path.is_dir() && let Ok(found_path) = Self::find_executable_in_dir(&path, executable_name) {
return Ok(found_path);
if path.is_dir() {
if let Ok(found_path) = Self::find_executable_in_dir(&path, executable_name) {
return Ok(found_path);
}
}
}
}
@ -176,56 +161,6 @@ impl PandocProcessBuilder {
Err("Executable not found".into())
}
/// Builds an ordered, de-duplicated list of candidate paths where a
/// system-wide Pandoc executable may live for the current OS, followed by
/// every directory on PATH. Order encodes preference and must be preserved.
fn system_pandoc_executable_candidates(executable_name: &str) -> Vec<PathBuf> {
let mut candidates: Vec<PathBuf> = Vec::new();
// Well-known per-OS install locations come first.
match env::consts::OS {
"windows" => {
// Typical installer targets: per-user and machine-wide Program Files.
Self::push_env_candidate(&mut candidates, "LOCALAPPDATA", &["Pandoc", executable_name]);
Self::push_env_candidate(&mut candidates, "ProgramFiles", &["Pandoc", executable_name]);
Self::push_env_candidate(&mut candidates, "ProgramFiles(x86)", &["Pandoc", executable_name]);
},
"macos" => {
// Homebrew (Apple silicon), Homebrew/manual (Intel), then system bin.
candidates.push(PathBuf::from("/opt/homebrew/bin").join(executable_name));
candidates.push(PathBuf::from("/usr/local/bin").join(executable_name));
candidates.push(PathBuf::from("/usr/bin").join(executable_name));
},
"linux" => {
candidates.push(PathBuf::from("/usr/local/bin").join(executable_name));
candidates.push(PathBuf::from("/usr/bin").join(executable_name));
candidates.push(PathBuf::from("/snap/bin").join(executable_name));
// Per-user installs under ~/.local/bin, when HOME is set.
if let Some(home_dir) = env::var_os("HOME") {
candidates.push(PathBuf::from(home_dir).join(".local").join("bin").join(executable_name));
}
},
_ => {},
}
// Fall back to every directory on the PATH environment variable.
if let Some(path_value) = env::var_os("PATH") {
for path_dir in env::split_paths(&path_value) {
candidates.push(path_dir.join(executable_name));
}
}
// De-duplicate while keeping first-occurrence order (HashSet::insert
// returns false for repeats, so later duplicates are filtered out).
let mut seen = HashSet::new();
candidates
.into_iter()
.filter(|path| seen.insert(path.clone()))
.collect()
}
/// Appends `<env value>/<parts...>` to `candidates` when the environment
/// variable `env_name` is set; does nothing when it is unset.
fn push_env_candidate(candidates: &mut Vec<PathBuf>, env_name: &str, parts: &[&str]) {
    if let Some(root) = env::var_os(env_name) {
        // Fold the path segments onto the root instead of a mutating loop.
        let candidate = parts.iter().fold(PathBuf::from(root), |mut acc, part| {
            acc.push(part);
            acc
        });
        candidates.push(candidate);
    }
}
/// Determines the executable name based on the current OS at runtime.
///
/// This uses runtime detection instead of metadata to ensure correct behavior
@ -237,31 +172,33 @@ impl PandocProcessBuilder {
let runtime_os = std::env::consts::OS;
let runtime_arch = std::env::consts::ARCH;
if let Ok(metadata) = META_DATA.lock() && let Some(metadata) = metadata.as_ref() {
let metadata_arch = &metadata.architecture;
if let Ok(metadata) = META_DATA.lock() {
if let Some(metadata) = metadata.as_ref() {
let metadata_arch = &metadata.architecture;
// Determine expected OS from metadata:
let metadata_is_windows = metadata_arch.starts_with("win-");
let metadata_is_macos = metadata_arch.starts_with("osx-");
let metadata_is_linux = metadata_arch.starts_with("linux-");
// Determine expected OS from metadata:
let metadata_is_windows = metadata_arch.starts_with("win-");
let metadata_is_macos = metadata_arch.starts_with("osx-");
let metadata_is_linux = metadata_arch.starts_with("linux-");
// Compare with runtime OS:
let runtime_is_windows = runtime_os == "windows";
let runtime_is_macos = runtime_os == "macos";
let runtime_is_linux = runtime_os == "linux";
// Compare with runtime OS:
let runtime_is_windows = runtime_os == "windows";
let runtime_is_macos = runtime_os == "macos";
let runtime_is_linux = runtime_os == "linux";
let os_mismatch = (metadata_is_windows != runtime_is_windows)
|| (metadata_is_macos != runtime_is_macos)
|| (metadata_is_linux != runtime_is_linux);
let os_mismatch = (metadata_is_windows != runtime_is_windows)
|| (metadata_is_macos != runtime_is_macos)
|| (metadata_is_linux != runtime_is_linux);
if os_mismatch {
warn!(
Source = "Pandoc";
"Runtime-detected OS '{}-{}' differs from metadata architecture '{}'. Using runtime-detected OS. This is expected on dev machines where metadata.txt may be outdated.",
runtime_os,
runtime_arch,
metadata_arch
);
if os_mismatch {
warn!(
Source = "Pandoc";
"Runtime-detected OS '{}-{}' differs from metadata architecture '{}'. Using runtime-detected OS. This is expected on dev machines where metadata.txt may be outdated.",
runtime_os,
runtime_arch,
metadata_arch
);
}
}
}
});

View File

@ -7,19 +7,18 @@ use std::path::Path;
use std::sync::{Arc, Mutex, OnceLock};
use log::{debug, error, info, warn};
use once_cell::sync::Lazy;
use axum::Json;
use serde::Serialize;
use rocket::get;
use rocket::serde::json::Json;
use rocket::serde::Serialize;
use tauri::api::process::{Command, CommandChild, CommandEvent};
use crate::api_token::{APIToken};
use crate::environment::{is_dev, DATA_DIRECTORY};
use crate::certificate_factory::generate_certificate;
use std::path::PathBuf;
use tauri::Manager;
use tauri::path::BaseDirectory;
use tauri::PathResolver;
use tempfile::{TempDir, Builder};
use crate::stale_process_cleanup::{kill_stale_process, log_potential_stale_process};
use crate::sidecar_types::SidecarType;
use tauri_plugin_shell::process::{CommandChild, CommandEvent};
use tauri_plugin_shell::ShellExt;
// Qdrant server process started in a separate process and can communicate
// via HTTP or gRPC with the .NET server and the runtime process
@ -69,7 +68,8 @@ pub struct ProvideQdrantInfo {
unavailable_reason: Option<String>,
}
pub async fn qdrant_port(_token: APIToken) -> Json<ProvideQdrantInfo> {
#[get("/system/qdrant/info")]
pub fn qdrant_port(_token: APIToken) -> Json<ProvideQdrantInfo> {
let status = QDRANT_STATUS.lock().unwrap();
let is_available = status.is_available;
let unavailable_reason = status.unavailable_reason.clone();
@ -98,12 +98,14 @@ pub async fn qdrant_port(_token: APIToken) -> Json<ProvideQdrantInfo> {
}
/// Starts the Qdrant server in a separate process.
pub fn start_qdrant_server<R: tauri::Runtime>(app_handle: tauri::AppHandle<R>){
pub fn start_qdrant_server(path_resolver: PathResolver){
let path = qdrant_base_path();
if !path.exists() && let Err(e) = fs::create_dir_all(&path){
error!(Source="Qdrant"; "The required directory to host the Qdrant database could not be created: {}", e);
set_qdrant_unavailable(format!("The Qdrant data directory could not be created: {e}"));
return;
if !path.exists() {
if let Err(e) = fs::create_dir_all(&path){
error!(Source="Qdrant"; "The required directory to host the Qdrant database could not be created: {}", e);
set_qdrant_unavailable(format!("The Qdrant data directory could not be created: {e}"));
return;
};
}
let (cert_path, key_path) = match create_temp_tls_files(&path) {
@ -119,7 +121,7 @@ pub fn start_qdrant_server<R: tauri::Runtime>(app_handle: tauri::AppHandle<R>){
let snapshot_path = path.join("snapshots").to_string_lossy().to_string();
let init_path = path.join(".qdrant-initialized").to_string_lossy().to_string();
let qdrant_server_environment: HashMap<String, String> = HashMap::from_iter([
let qdrant_server_environment = HashMap::from_iter([
(String::from("QDRANT__SERVICE__HTTP_PORT"), QDRANT_SERVER_PORT_HTTP.to_string()),
(String::from("QDRANT__SERVICE__GRPC_PORT"), QDRANT_SERVER_PORT_GRPC.to_string()),
(String::from("QDRANT_INIT_FILE_PATH"), init_path),
@ -133,9 +135,9 @@ pub fn start_qdrant_server<R: tauri::Runtime>(app_handle: tauri::AppHandle<R>){
let server_spawn_clone = QDRANT_SERVER.clone();
let qdrant_relative_source_path = "resources/databases/qdrant/config.yaml";
let qdrant_source_path = match app_handle.path().resolve(qdrant_relative_source_path, BaseDirectory::Resource) {
Ok(path) => path,
Err(_) => {
let qdrant_source_path = match path_resolver.resolve_resource(qdrant_relative_source_path) {
Some(path) => path,
None => {
let reason = format!("The Qdrant config resource '{qdrant_relative_source_path}' could not be resolved.");
error!(Source = "Qdrant"; "{reason} Starting the app without Qdrant.");
set_qdrant_unavailable(reason);
@ -145,9 +147,7 @@ pub fn start_qdrant_server<R: tauri::Runtime>(app_handle: tauri::AppHandle<R>){
let qdrant_source_path_display = qdrant_source_path.to_string_lossy().to_string();
tauri::async_runtime::spawn(async move {
let shell = app_handle.shell();
let sidecar = match shell.sidecar("qdrant") {
let sidecar = match Command::new_sidecar("qdrant") {
Ok(sidecar) => sidecar,
Err(e) => {
let reason = format!("Failed to create sidecar for Qdrant: {e}");
@ -183,8 +183,7 @@ pub fn start_qdrant_server<R: tauri::Runtime>(app_handle: tauri::AppHandle<R>){
while let Some(event) = rx.recv().await {
match event {
CommandEvent::Stdout(line) => {
let line_utf8 = String::from_utf8_lossy(&line).to_string();
let line = line_utf8.trim_end();
let line = line.trim_end();
if line.contains("INFO") || line.contains("info") {
info!(Source = "Qdrant Server"; "{line}");
} else if line.contains("WARN") || line.contains("warning") {
@ -197,8 +196,7 @@ pub fn start_qdrant_server<R: tauri::Runtime>(app_handle: tauri::AppHandle<R>){
},
CommandEvent::Stderr(line) => {
let line_utf8 = String::from_utf8_lossy(&line).to_string();
error!(Source = "Qdrant Server (stderr)"; "{line_utf8}");
error!(Source = "Qdrant Server (stderr)"; "{line}");
},
_ => {}

View File

@ -1,16 +1,12 @@
use log::info;
use once_cell::sync::Lazy;
use axum::routing::{get, post};
use axum::Router;
use axum_server::tls_rustls::RustlsConfig;
use std::net::SocketAddr;
use std::sync::Once;
use rocket::config::Shutdown;
use rocket::figment::Figment;
use rocket::routes;
use crate::runtime_certificate::{CERTIFICATE, CERTIFICATE_PRIVATE_KEY};
use crate::environment::is_dev;
use crate::network::get_available_port;
static RUSTLS_CRYPTO_PROVIDER_INIT: Once = Once::new();
/// The port used for the runtime API server. In the development environment, we use a fixed
/// port, in the production environment we use the next available port. This differentiation
/// is necessary because we cannot communicate the port to the .NET server in the development
@ -28,55 +24,109 @@ pub static API_SERVER_PORT: Lazy<u16> = Lazy::new(|| {
pub fn start_runtime_api() {
let api_port = *API_SERVER_PORT;
info!("Try to start the API server on 'http://localhost:{api_port}'...");
// Get the shutdown configuration:
let shutdown = create_shutdown();
let app = Router::new()
.route("/system/dotnet/port", get(crate::dotnet::dotnet_port))
.route("/system/dotnet/ready", get(crate::dotnet::dotnet_ready))
.route("/system/qdrant/info", get(crate::qdrant::qdrant_port))
.route("/clipboard/set", post(crate::clipboard::set_clipboard))
.route("/events", get(crate::app_window::get_event_stream))
.route("/updates/check", get(crate::app_window::check_for_update))
.route("/updates/install", get(crate::app_window::install_update))
.route("/app/exit", post(crate::app_window::exit_app))
.route("/select/directory", post(crate::file_actions::select_directory))
.route("/select/file", post(crate::file_actions::select_file))
.route("/select/files", post(crate::file_actions::select_files))
.route("/save/file", post(crate::file_actions::save_file))
.route("/secrets/get", post(crate::secret::get_secret))
.route("/secrets/store", post(crate::secret::store_secret))
.route("/secrets/delete", post(crate::secret::delete_secret))
.route("/system/directories/config", get(crate::environment::get_config_directory))
.route("/system/directories/data", get(crate::environment::get_data_directory))
.route("/system/language", get(crate::environment::read_user_language))
.route("/system/enterprise/config/id", get(crate::environment::read_enterprise_env_config_id))
.route("/system/enterprise/config/server", get(crate::environment::read_enterprise_env_config_server_url))
.route("/system/enterprise/config/encryption_secret", get(crate::environment::read_enterprise_env_config_encryption_secret))
.route("/system/enterprise/configs", get(crate::environment::read_enterprise_configs))
.route("/retrieval/fs/extract", get(crate::file_data::extract_data))
.route("/log/paths", get(crate::log::get_log_paths))
.route("/log/event", post(crate::log::log_event))
.route("/shortcuts/register", post(crate::app_window::register_shortcut))
.route("/shortcuts/validate", post(crate::app_window::validate_shortcut))
.route("/shortcuts/suspend", post(crate::app_window::suspend_shortcuts))
.route("/shortcuts/resume", post(crate::app_window::resume_shortcuts));
// Configure the runtime API server:
let figment = Figment::from(rocket::Config::release_default())
// We use the next available port which was determined before:
.merge(("port", api_port))
// The runtime API server should be accessible only from the local machine:
.merge(("address", "127.0.0.1"))
// We do not want to use the Ctrl+C signal to stop the server:
.merge(("ctrlc", false))
// Set a name for the server:
.merge(("ident", "AI Studio Runtime API"))
// Set the maximum number of workers and blocking threads:
.merge(("workers", 3))
.merge(("max_blocking", 12))
// No colors and emojis in the log output:
.merge(("cli_colors", false))
// Read the TLS certificate and key from the generated certificate data in-memory:
.merge(("tls.certs", CERTIFICATE.get().unwrap()))
.merge(("tls.key", CERTIFICATE_PRIVATE_KEY.get().unwrap()))
// Set the shutdown configuration:
.merge(("shutdown", shutdown));
//
// Start the runtime API server in a separate thread. This is necessary
// because the server is blocking, and we need to run the Tauri app in
// parallel:
//
tauri::async_runtime::spawn(async move {
install_rustls_crypto_provider();
let cert = CERTIFICATE.get().unwrap().clone();
let key = CERTIFICATE_PRIVATE_KEY.get().unwrap().clone();
let tls_config = RustlsConfig::from_pem(cert, key).await.unwrap();
let addr = SocketAddr::from(([127, 0, 0, 1], api_port));
axum_server::bind_rustls(addr, tls_config)
.serve(app.into_make_service())
.await
.unwrap();
rocket::custom(figment)
.mount("/", routes![
crate::dotnet::dotnet_port,
crate::dotnet::dotnet_ready,
crate::qdrant::qdrant_port,
crate::clipboard::set_clipboard,
crate::app_window::get_event_stream,
crate::app_window::check_for_update,
crate::app_window::install_update,
crate::app_window::select_directory,
crate::app_window::select_file,
crate::app_window::select_files,
crate::app_window::save_file,
crate::app_window::exit_app,
crate::secret::get_secret,
crate::secret::store_secret,
crate::secret::delete_secret,
crate::environment::get_data_directory,
crate::environment::get_config_directory,
crate::environment::read_user_language,
crate::environment::read_enterprise_env_config_id,
crate::environment::read_enterprise_env_config_server_url,
crate::environment::read_enterprise_env_config_encryption_secret,
crate::environment::read_enterprise_configs,
crate::file_data::extract_data,
crate::log::get_log_paths,
crate::log::log_event,
crate::app_window::register_shortcut,
crate::app_window::validate_shortcut,
crate::app_window::suspend_shortcuts,
crate::app_window::resume_shortcuts,
])
.ignite().await.unwrap()
.launch().await.unwrap();
});
}
fn install_rustls_crypto_provider() {
RUSTLS_CRYPTO_PROVIDER_INIT.call_once(|| {
let _ = rustls::crypto::aws_lc_rs::default_provider().install_default();
});
fn create_shutdown() -> Shutdown {
//
// Create a shutdown configuration, depending on the operating system:
//
#[cfg(unix)]
{
use std::collections::HashSet;
let mut shutdown = Shutdown {
// We do not want to use the Ctrl+C signal to stop the server:
ctrlc: false,
// Everything else is set to default for now:
..Shutdown::default()
};
shutdown.signals = HashSet::new();
shutdown
}
#[cfg(windows)]
{
Shutdown {
// We do not want to use the Ctrl+C signal to stop the server:
ctrlc: false,
// Everything else is set to default for now:
..Shutdown::default()
}
}
}

View File

@ -1,29 +1,33 @@
use once_cell::sync::Lazy;
use axum::extract::FromRequestParts;
use axum::http::request::Parts;
use axum::http::StatusCode;
use rocket::http::Status;
use rocket::Request;
use rocket::request::FromRequest;
use crate::api_token::{generate_api_token, APIToken};
pub static API_TOKEN: Lazy<APIToken> = Lazy::new(generate_api_token);
pub static API_TOKEN: Lazy<APIToken> = Lazy::new(|| generate_api_token());
impl<S> FromRequestParts<S> for APIToken
where
S: Send + Sync,
{
type Rejection = StatusCode;
/// The request outcome type used to handle API token requests.
type RequestOutcome<R, T> = rocket::request::Outcome<R, T>;
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
match parts.headers.get("token").and_then(|value| value.to_str().ok()) {
/// The request outcome implementation for the API token.
#[rocket::async_trait]
impl<'r> FromRequest<'r> for APIToken {
type Error = APITokenError;
/// Handles the API token requests.
async fn from_request(request: &'r Request<'_>) -> RequestOutcome<Self, Self::Error> {
let token = request.headers().get_one("token");
match token {
Some(token) => {
let received_token = APIToken::from_hex_text(token);
if API_TOKEN.validate(&received_token) {
Ok(received_token)
RequestOutcome::Success(received_token)
} else {
Err(StatusCode::UNAUTHORIZED)
RequestOutcome::Error((Status::Unauthorized, APITokenError::Invalid))
}
}
None => Err(StatusCode::UNAUTHORIZED),
None => RequestOutcome::Error((Status::Unauthorized, APITokenError::Missing)),
}
}
}

View File

@ -1,45 +1,15 @@
use axum::Json;
use keyring_core::{Entry, Error as KeyringError};
use keyring::Entry;
use log::{error, info, warn};
use rocket::post;
use rocket::serde::json::Json;
use serde::{Deserialize, Serialize};
use keyring::error::Error::NoEntry;
use crate::api_token::APIToken;
use crate::encryption::{EncryptedText, ENCRYPTION};
/// Initializes the native credential store used by keyring-core.
pub fn init_secret_store() {
cfg_if::cfg_if! {
if #[cfg(target_os = "macos")] {
match apple_native_keyring_store::keychain::Store::new() {
Ok(store) => {
keyring_core::set_default_store(store);
info!(Source = "Secret Store"; "Initialized the macOS Keychain credential store.");
},
Err(e) => error!(Source = "Secret Store"; "Failed to initialize the macOS Keychain credential store: {e}."),
}
} else if #[cfg(target_os = "windows")] {
match windows_native_keyring_store::Store::new() {
Ok(store) => {
keyring_core::set_default_store(store);
info!(Source = "Secret Store"; "Initialized the Windows Credential Manager store.");
},
Err(e) => error!(Source = "Secret Store"; "Failed to initialize the Windows Credential Manager store: {e}."),
}
} else if #[cfg(target_os = "linux")] {
match dbus_secret_service_keyring_store::Store::new() {
Ok(store) => {
keyring_core::set_default_store(store);
info!(Source = "Secret Store"; "Initialized the DBus Secret Service credential store.");
},
Err(e) => error!(Source = "Secret Store"; "Failed to initialize the DBus Secret Service credential store: {e}."),
}
} else {
warn!(Source = "Secret Store"; "No native credential store is configured for this platform.");
}
}
}
/// Stores a secret in the secret store using the operating system's keyring.
pub async fn store_secret(_token: APIToken, request: Json<StoreSecret>) -> Json<StoreSecretResponse> {
#[post("/secrets/store", data = "<request>")]
pub fn store_secret(_token: APIToken, request: Json<StoreSecret>) -> Json<StoreSecretResponse> {
let user_name = request.user_name.as_str();
let decrypted_text = match ENCRYPTION.decrypt(&request.secret) {
Ok(text) => text,
@ -53,16 +23,7 @@ pub async fn store_secret(_token: APIToken, request: Json<StoreSecret>) -> Json<
};
let service = format!("mindwork-ai-studio::{}", request.destination);
let entry = match Entry::new(service.as_str(), user_name) {
Ok(entry) => entry,
Err(e) => {
error!(Source = "Secret Store"; "Failed to create secret entry for {service} and user {user_name}: {e}.");
return Json(StoreSecretResponse {
success: false,
issue: e.to_string(),
});
},
};
let entry = Entry::new(service.as_str(), user_name).unwrap();
let result = entry.set_password(decrypted_text.as_str());
match result {
Ok(_) => {
@ -99,23 +60,11 @@ pub struct StoreSecretResponse {
}
/// Retrieves a secret from the secret store using the operating system's keyring.
pub async fn get_secret(_token: APIToken, request: Json<RequestSecret>) -> Json<RequestedSecret> {
#[post("/secrets/get", data = "<request>")]
pub fn get_secret(_token: APIToken, request: Json<RequestSecret>) -> Json<RequestedSecret> {
let user_name = request.user_name.as_str();
let service = format!("mindwork-ai-studio::{}", request.destination);
let entry = match Entry::new(service.as_str(), user_name) {
Ok(entry) => entry,
Err(e) => {
if !request.is_trying {
error!(Source = "Secret Store"; "Failed to create secret entry for '{service}' and user '{user_name}': {e}.");
}
return Json(RequestedSecret {
success: false,
secret: EncryptedText::new(String::from("")),
issue: format!("Failed to create secret entry for '{service}' and user '{user_name}': {e}"),
});
},
};
let entry = Entry::new(service.as_str(), user_name).unwrap();
let secret = entry.get_password();
match secret {
Ok(s) => {
@ -172,20 +121,11 @@ pub struct RequestedSecret {
}
/// Deletes a secret from the secret store using the operating system's keyring.
pub async fn delete_secret(_token: APIToken, request: Json<RequestSecret>) -> Json<DeleteSecretResponse> {
#[post("/secrets/delete", data = "<request>")]
pub fn delete_secret(_token: APIToken, request: Json<RequestSecret>) -> Json<DeleteSecretResponse> {
let user_name = request.user_name.as_str();
let service = format!("mindwork-ai-studio::{}", request.destination);
let entry = match Entry::new(service.as_str(), user_name) {
Ok(entry) => entry,
Err(e) => {
error!(Source = "Secret Store"; "Failed to create secret entry for {service} and user {user_name}: {e}.");
return Json(DeleteSecretResponse {
success: false,
was_entry_found: false,
issue: e.to_string(),
});
},
};
let entry = Entry::new(service.as_str(), user_name).unwrap();
let result = entry.delete_credential();
match result {
@ -198,7 +138,7 @@ pub async fn delete_secret(_token: APIToken, request: Json<RequestSecret>) -> Js
})
},
Err(KeyringError::NoEntry) => {
Err(NoEntry) => {
warn!(Source = "Secret Store"; "No secret for {service} and user {user_name} was found.");
Json(DeleteSecretResponse {
success: true,

View File

@ -50,7 +50,7 @@ pub fn kill_stale_process(pid_file_path: PathBuf, sidecar_type: SidecarType) ->
let killed = process.kill_with(Signal::Kill).unwrap_or_else(|| process.kill());
if !killed {
return Err(Error::other("Failed to kill process"));
return Err(Error::new(ErrorKind::Other, "Failed to kill process"));
}
info!(Source="Stale Process Cleanup";"{}: Killed process: \"{}\"", sidecar_type,pid_file_path.display());
} else {

View File

@ -1,57 +1,44 @@
{
"productName": "MindWork AI Studio",
"mainBinaryName": "MindWork AI Studio",
"version": "26.5.4",
"identifier": "com.github.mindwork-ai.ai-studio",
"build": {
"frontendDist": "ui/"
"devPath": "ui/",
"distDir": "ui/",
"withGlobalTauri": false
},
"bundle": {
"active": true,
"targets": [
"appimage",
"app",
"dmg",
"nsis"
],
"icon": [
"icons/32x32.png",
"icons/128x128.png",
"icons/128x128@2x.png",
"icons/icon.icns",
"icons/icon.ico"
],
"externalBin": [
"../app/MindWork AI Studio/bin/dist/mindworkAIStudioServer",
"target/databases/qdrant/qdrant"
],
"resources": [
"resources/databases/qdrant/config.yaml",
"resources/libraries/*"
],
"macOS": {
"exceptionDomain": "localhost"
},
"createUpdaterArtifacts": "v1Compatible"
"package": {
"productName": "MindWork AI Studio",
"version": "26.5.1"
},
"plugins": {
"updater": {
"windows": {
"installMode": "passive"
"tauri": {
"allowlist": {
"all": false,
"shell": {
"sidecar": true,
"all": false,
"open": true,
"scope": [
{
"name": "../app/MindWork AI Studio/bin/dist/mindworkAIStudioServer",
"sidecar": true,
"args": true
},
{
"name": "target/databases/qdrant/qdrant",
"sidecar": true,
"args": true
}
]
},
"endpoints": [
"https://github.com/MindWorkAI/AI-Studio/releases/download/v26.5.4/latest.json"
],
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IDM3MzE4MTM4RTNDMkM0NEQKUldSTnhNTGpPSUV4TjFkczFxRFJOZWgydzFQN1dmaFlKbXhJS1YyR1RKS1RnR09jYUpMaGsrWXYK"
}
},
"app": {
"withGlobalTauri": false,
"http" : {
"all": true,
"request": true,
"scope": [
"http://localhost"
]
},
"fs": {
"scope": ["$RESOURCE/resources/*"]
}
},
"windows": [
{
"fullscreen": false,
@ -59,13 +46,51 @@
"title": "MindWork AI Studio",
"width": 1920,
"height": 1080,
"dragDropEnabled": true,
"useHttpsScheme": true
"fileDropEnabled": true
}
],
"security": {
"csp": null
"csp": null,
"dangerousRemoteDomainIpcAccess": [
{
"domain": "localhost",
"windows": ["main"],
"enableTauriAPI": true
}
]
},
"bundle": {
"active": true,
"targets": "all",
"identifier": "com.github.mindwork-ai.ai-studio",
"externalBin": [
"../app/MindWork AI Studio/bin/dist/mindworkAIStudioServer",
"target/databases/qdrant/qdrant"
],
"resources": [
"resources/**"
],
"macOS": {
"exceptionDomain": "localhost"
},
"icon": [
"icons/32x32.png",
"icons/128x128.png",
"icons/128x128@2x.png",
"icons/icon.icns",
"icons/icon.ico"
]
},
"updater": {
"active": true,
"endpoints": [
"https://github.com/MindWorkAI/AI-Studio/releases/download/v26.5.2/latest.json"
],
"dialog": false,
"windows": {
"installMode": "passive"
},
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IDM3MzE4MTM4RTNDMkM0NEQKUldSTnhNTGpPSUV4TjFkczFxRFJOZWgydzFQN1dmaFlKbXhJS1YyR1RKS1RnR09jYUpMaGsrWXYK"
}
}
}