Mirror of https://github.com/MindWorkAI/AI-Studio.git, synced 2026-02-12 02:01:36 +00:00
Add Qdrant as vector database (#580)
Some checks failed
Build and Release / Read metadata (push) Has been cancelled
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-apple-darwin, osx-arm64, macos-latest, aarch64-apple-darwin, dmg updater) (push) Has been cancelled
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-pc-windows-msvc.exe, win-arm64, windows-latest, aarch64-pc-windows-msvc, nsis updater) (push) Has been cancelled
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-unknown-linux-gnu, linux-arm64, ubuntu-22.04-arm, aarch64-unknown-linux-gnu, appimage deb updater) (push) Has been cancelled
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-apple-darwin, osx-x64, macos-latest, x86_64-apple-darwin, dmg updater) (push) Has been cancelled
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-pc-windows-msvc.exe, win-x64, windows-latest, x86_64-pc-windows-msvc, nsis updater) (push) Has been cancelled
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-unknown-linux-gnu, linux-x64, ubuntu-22.04, x86_64-unknown-linux-gnu, appimage deb updater) (push) Has been cancelled
Build and Release / Prepare & create release (push) Has been cancelled
Build and Release / Publish release (push) Has been cancelled
Co-authored-by: Thorsten Sommer <SommerEngineering@users.noreply.github.com>
This commit is contained in: parent e65874d99b, commit 5af6a8db3e
134 .github/workflows/build-and-release.yml (vendored)
@@ -173,6 +173,9 @@ jobs:
pdfium_version=$(sed -n '11p' metadata.txt)
pdfium_version=$(echo $pdfium_version | cut -d'.' -f3)

# Next line is the Qdrant version:
qdrant_version="v$(sed -n '12p' metadata.txt)"

# Write the metadata to the environment:
echo "APP_VERSION=${app_version}" >> $GITHUB_ENV
echo "FORMATTED_APP_VERSION=${formatted_app_version}" >> $GITHUB_ENV
@@ -185,6 +188,7 @@ jobs:
echo "TAURI_VERSION=${tauri_version}" >> $GITHUB_ENV
echo "ARCHITECTURE=${{ matrix.dotnet_runtime }}" >> $GITHUB_ENV
echo "PDFIUM_VERSION=${pdfium_version}" >> $GITHUB_ENV
echo "QDRANT_VERSION=${qdrant_version}" >> $GITHUB_ENV

# Log the metadata:
echo "App version: '${formatted_app_version}'"
@@ -197,6 +201,7 @@ jobs:
echo "Tauri version: '${tauri_version}'"
echo "Architecture: '${{ matrix.dotnet_runtime }}'"
echo "PDFium version: '${pdfium_version}'"
echo "Qdrant version: '${qdrant_version}'"

- name: Read and format metadata (Windows)
if: matrix.platform == 'windows-latest'
@@ -241,6 +246,9 @@ jobs:
$pdfium_version = $metadata[10]
$pdfium_version = $pdfium_version.Split('.')[2]

# Next line is the necessary Qdrant version:
$qdrant_version = "v$($metadata[11])"

# Write the metadata to the environment:
Write-Output "APP_VERSION=${app_version}" >> $env:GITHUB_ENV
Write-Output "FORMATTED_APP_VERSION=${formatted_app_version}" >> $env:GITHUB_ENV
@@ -252,6 +260,7 @@ jobs:
Write-Output "MUD_BLAZOR_VERSION=${mud_blazor_version}" >> $env:GITHUB_ENV
Write-Output "ARCHITECTURE=${{ matrix.dotnet_runtime }}" >> $env:GITHUB_ENV
Write-Output "PDFIUM_VERSION=${pdfium_version}" >> $env:GITHUB_ENV
Write-Output "QDRANT_VERSION=${qdrant_version}" >> $env:GITHUB_ENV

# Log the metadata:
Write-Output "App version: '${formatted_app_version}'"
@@ -264,6 +273,7 @@ jobs:
Write-Output "Tauri version: '${tauri_version}'"
Write-Output "Architecture: '${{ matrix.dotnet_runtime }}'"
Write-Output "PDFium version: '${pdfium_version}'"
Write-Output "Qdrant version: '${qdrant_version}'"

- name: Setup .NET
uses: actions/setup-dotnet@v4
@@ -334,7 +344,7 @@ jobs:
echo "Cleaning up ..."
rm -fr "$TMP"

- name: Install PDFium (Windows)
- name: Deploy PDFium (Windows)
if: matrix.platform == 'windows-latest'
env:
PDFIUM_VERSION: ${{ env.PDFIUM_VERSION }}
@@ -385,6 +395,128 @@ jobs:
Write-Host "Cleaning up ..."
Remove-Item $ARCHIVE -Force -ErrorAction SilentlyContinue

# Try to remove the temporary directory, but ignore errors if files are still in use
try {
Remove-Item $TMP -Recurse -Force -ErrorAction Stop
Write-Host "Successfully cleaned up temporary directory: $TMP"
} catch {
Write-Warning "Could not fully clean up temporary directory: $TMP. This is usually harmless as Windows will clean it up later. Error: $($_.Exception.Message)"
}

- name: Deploy Qdrant (Unix)
if: matrix.platform != 'windows-latest'
env:
QDRANT_VERSION: ${{ env.QDRANT_VERSION }}
DOTNET_RUNTIME: ${{ matrix.dotnet_runtime }}
RUST_TARGET: ${{ matrix.rust_target }}
run: |
set -e

# Target directory:
TDB_DIR="runtime/target/databases/qdrant"
mkdir -p "$TDB_DIR"

case "${DOTNET_RUNTIME}" in
linux-x64)
QDRANT_FILE="x86_64-unknown-linux-gnu.tar.gz"
DB_SOURCE="qdrant"
DB_TARGET="qdrant-${RUST_TARGET}"
;;
linux-arm64)
QDRANT_FILE="aarch64-unknown-linux-musl.tar.gz"
DB_SOURCE="qdrant"
DB_TARGET="qdrant-${RUST_TARGET}"
;;
osx-x64)
QDRANT_FILE="x86_64-apple-darwin.tar.gz"
DB_SOURCE="qdrant"
DB_TARGET="qdrant-${RUST_TARGET}"
;;
osx-arm64)
QDRANT_FILE="aarch64-apple-darwin.tar.gz"
DB_SOURCE="qdrant"
DB_TARGET="qdrant-${RUST_TARGET}"
;;
*)
echo "Unknown platform: ${DOTNET_RUNTIME}"
exit 1
;;
esac

QDRANT_URL="https://github.com/qdrant/qdrant/releases/download/${QDRANT_VERSION}/qdrant-${QDRANT_FILE}"

echo "Download Qdrant $QDRANT_URL ..."
TMP=$(mktemp -d)
ARCHIVE="${TMP}/qdrant.tgz"

curl -fsSL -o "$ARCHIVE" "$QDRANT_URL"

echo "Extracting Qdrant ..."
tar xzf "$ARCHIVE" -C "$TMP"
SRC="${TMP}/${DB_SOURCE}"

if [ ! -f "$SRC" ]; then
echo "Was not able to find Qdrant source: $SRC"
exit 1
fi

echo "Copy Qdrant from ${DB_TARGET} to ${TDB_DIR}/"
cp -f "$SRC" "$TDB_DIR/$DB_TARGET"

echo "Cleaning up ..."
rm -fr "$TMP"

- name: Deploy Qdrant (Windows)
if: matrix.platform == 'windows-latest'
env:
QDRANT_VERSION: ${{ env.QDRANT_VERSION }}
DOTNET_RUNTIME: ${{ matrix.dotnet_runtime }}
RUST_TARGET: ${{ matrix.rust_target }}
run: |
$TDB_DIR = "runtime\target\databases\qdrant"
New-Item -ItemType Directory -Force -Path $TDB_DIR | Out-Null

switch ($env:DOTNET_RUNTIME) {
"win-x64" {
$QDRANT_FILE = "x86_64-pc-windows-msvc.zip"
$DB_SOURCE = "qdrant.exe"
$DB_TARGET = "qdrant-$($env:RUST_TARGET).exe"
}
"win-arm64" {
$QDRANT_FILE = "x86_64-pc-windows-msvc.zip"
$DB_SOURCE = "qdrant.exe"
$DB_TARGET = "qdrant-$($env:RUST_TARGET).exe"
}
default {
Write-Error "Unknown platform: $($env:DOTNET_RUNTIME)"
exit 1
}
}

$QDRANT_URL = "https://github.com/qdrant/qdrant/releases/download/$($env:QDRANT_VERSION)/qdrant-$QDRANT_FILE"
Write-Host "Download $QDRANT_URL ..."

# Create a unique temporary directory (not just a file)
$TMP = Join-Path ([System.IO.Path]::GetTempPath()) ([System.IO.Path]::GetRandomFileName())
New-Item -ItemType Directory -Path $TMP -Force | Out-Null
$ARCHIVE = Join-Path $TMP "qdrant.tgz"

Invoke-WebRequest -Uri $QDRANT_URL -OutFile $ARCHIVE

Write-Host "Extracting Qdrant ..."
tar -xzf $ARCHIVE -C $TMP

$SRC = Join-Path $TMP $DB_SOURCE
if (!(Test-Path $SRC)) {
Write-Error "Cannot find Qdrant source: $SRC"
exit 1
}

$DEST = Join-Path $TDB_DIR $DB_TARGET
Copy-Item -Path $SRC -Destination $DEST -Force

Write-Host "Cleaning up ..."
Remove-Item $ARCHIVE -Force -ErrorAction SilentlyContinue

# Try to remove the temporary directory, but ignore errors if files are still in use
try {
Remove-Item $TMP -Recurse -Force -ErrorAction Stop
7 .gitignore (vendored)
@@ -6,6 +6,13 @@ libpdfium.dylib
libpdfium.so
libpdfium.dll

# Ignore qdrant database:
qdrant-aarch64-apple-darwin
qdrant-x86_64-apple-darwin
qdrant-aarch64-unknown-linux-gnu
qdrant-x86_64-unknown-linux-gnu
qdrant-x86_64-pc-windows-msvc.exe

# User-specific files
*.rsuser
*.suo
@@ -32,7 +32,7 @@ Since November 2024: Work on RAG (integration of your data and files) has begun.
- [x] ~~App: Implement dialog for checking & handling [pandoc](https://pandoc.org/) installation ([PR #393](https://github.com/MindWorkAI/AI-Studio/pull/393), [PR #487](https://github.com/MindWorkAI/AI-Studio/pull/487))~~
- [ ] App: Implement external embedding providers
- [ ] App: Implement the process to vectorize one local file using embeddings
- [ ] Runtime: Integration of the vector database [Qdrant](https://github.com/qdrant/qdrant)
- [x] ~~Runtime: Integration of the vector database [Qdrant](https://github.com/qdrant/qdrant) ([PR #580](https://github.com/MindWorkAI/AI-Studio/pull/580))~~
- [ ] App: Implement the continuous process of vectorizing data
- [x] ~~App: Define a common retrieval context interface for the integration of RAG processes in chats (PR [#281](https://github.com/MindWorkAI/AI-Studio/pull/281), [#284](https://github.com/MindWorkAI/AI-Studio/pull/284), [#286](https://github.com/MindWorkAI/AI-Studio/pull/286), [#287](https://github.com/MindWorkAI/AI-Studio/pull/287))~~
- [x] ~~App: Define a common augmentation interface for the integration of RAG processes in chats (PR [#288](https://github.com/MindWorkAI/AI-Studio/pull/288), [#289](https://github.com/MindWorkAI/AI-Studio/pull/289))~~
3 app/Build/Commands/Database.cs (new file)
@@ -0,0 +1,3 @@
namespace Build.Commands;

public record Database(string Path, string Filename);
120 app/Build/Commands/Qdrant.cs (new file)
@@ -0,0 +1,120 @@
using System.Formats.Tar;
using System.IO.Compression;

using SharedTools;

namespace Build.Commands;

public static class Qdrant
{
public static async Task InstallAsync(RID rid, string version)
{
Console.Write($"- Installing Qdrant {version} for {rid.ToUserFriendlyName()} ...");

var cwd = Environment.GetRustRuntimeDirectory();
var qdrantTmpDownloadPath = Path.GetTempFileName();
var qdrantTmpExtractPath = Directory.CreateTempSubdirectory();
var qdrantUrl = GetQdrantDownloadUrl(rid, version);

//
// Download the file:
//
Console.Write(" downloading ...");
using (var client = new HttpClient())
{
var response = await client.GetAsync(qdrantUrl);
if (!response.IsSuccessStatusCode)
{
Console.WriteLine($" failed to download Qdrant {version} for {rid.ToUserFriendlyName()} from {qdrantUrl}");
return;
}

await using var fileStream = File.Create(qdrantTmpDownloadPath);
await response.Content.CopyToAsync(fileStream);
}

//
// Extract the downloaded file:
//
Console.Write(" extracting ...");
await using(var zStream = File.Open(qdrantTmpDownloadPath, FileMode.Open, FileAccess.Read, FileShare.Read))
{
if (rid == RID.WIN_X64)
{
using var archive = new ZipArchive(zStream, ZipArchiveMode.Read);
archive.ExtractToDirectory(qdrantTmpExtractPath.FullName, overwriteFiles: true);
}
else
{
await using var uncompressedStream = new GZipStream(zStream, CompressionMode.Decompress);
await TarFile.ExtractToDirectoryAsync(uncompressedStream, qdrantTmpExtractPath.FullName, true);
}
}

//
// Copy the database to the target directory:
//
Console.Write(" deploying ...");
var database = GetDatabasePath(rid);
if (string.IsNullOrWhiteSpace(database.Path))
{
Console.WriteLine($" failed to find the database path for {rid.ToUserFriendlyName()}");
return;
}

var qdrantDbSourcePath = Path.Join(qdrantTmpExtractPath.FullName, database.Path);
var qdrantDbTargetPath = Path.Join(cwd, "target", "databases", "qdrant",database.Filename);
if (!File.Exists(qdrantDbSourcePath))
{
Console.WriteLine($" failed to find the database file '{qdrantDbSourcePath}'");
return;
}

Directory.CreateDirectory(Path.Join(cwd, "target", "databases", "qdrant"));
if (File.Exists(qdrantDbTargetPath))
File.Delete(qdrantDbTargetPath);

File.Copy(qdrantDbSourcePath, qdrantDbTargetPath);

//
// Cleanup:
//
Console.Write(" cleaning up ...");
File.Delete(qdrantTmpDownloadPath);
Directory.Delete(qdrantTmpExtractPath.FullName, true);

Console.WriteLine(" done.");
}

private static Database GetDatabasePath(RID rid) => rid switch
{
RID.OSX_ARM64 => new("qdrant", "qdrant-aarch64-apple-darwin"),
RID.OSX_X64 => new("qdrant", "qdrant-x86_64-apple-darwin"),

RID.LINUX_ARM64 => new("qdrant", "qdrant-aarch64-unknown-linux-gnu"),
RID.LINUX_X64 => new("qdrant", "qdrant-x86_64-unknown-linux-gnu"),

RID.WIN_X64 => new("qdrant.exe", "qdrant-x86_64-pc-windows-msvc.exe"),
RID.WIN_ARM64 => new("qdrant.exe", "qdrant-aarch64-pc-windows-msvc.exe"),

_ => new(string.Empty, string.Empty),
};

private static string GetQdrantDownloadUrl(RID rid, string version)
{
var baseUrl = $"https://github.com/qdrant/qdrant/releases/download/v{version}/qdrant-";
return rid switch
{
RID.LINUX_ARM64 => $"{baseUrl}aarch64-unknown-linux-musl.tar.gz",
RID.LINUX_X64 => $"{baseUrl}x86_64-unknown-linux-gnu.tar.gz",

RID.OSX_ARM64 => $"{baseUrl}aarch64-apple-darwin.tar.gz",
RID.OSX_X64 => $"{baseUrl}x86_64-apple-darwin.tar.gz",

RID.WIN_X64 => $"{baseUrl}x86_64-pc-windows-msvc.zip",
RID.WIN_ARM64 => $"{baseUrl}x86_64-pc-windows-msvc.zip",

_ => string.Empty,
};
}
}
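For orientation only (this sketch is not part of the commit), the build helper above is driven with a RID and the Qdrant version read from metadata.txt; the values below are examples, with 1.16.3 matching the version this commit adds to metadata.txt:

// Hypothetical usage sketch of the build helper shown above.
// For RID.LINUX_X64 and version "1.16.3", GetQdrantDownloadUrl resolves to
// https://github.com/qdrant/qdrant/releases/download/v1.16.3/qdrant-x86_64-unknown-linux-gnu.tar.gz,
// and InstallAsync copies the extracted binary to
// <rust runtime>/target/databases/qdrant/qdrant-x86_64-unknown-linux-gnu.
await Qdrant.InstallAsync(RID.LINUX_X64, "1.16.3");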
@@ -151,6 +151,9 @@ public sealed partial class UpdateMetadataCommands
var pdfiumVersion = await this.ReadPdfiumVersion();
await Pdfium.InstallAsync(rid, pdfiumVersion);

var qdrantVersion = await this.ReadQdrantVersion();
await Qdrant.InstallAsync(rid, qdrantVersion);

Console.Write($"- Start .NET build for {rid.ToUserFriendlyName()} ...");
await this.ReadCommandOutput(pathApp, "dotnet", $"clean --configuration release --runtime {rid.AsMicrosoftRid()}");
var dotnetBuildOutput = await this.ReadCommandOutput(pathApp, "dotnet", $"publish --configuration release --runtime {rid.AsMicrosoftRid()} --disable-build-servers --force");
@@ -364,6 +367,16 @@ public sealed partial class UpdateMetadataCommands
return shortVersion;
}

private async Task<string> ReadQdrantVersion()
{
const int QDRANT_VERSION_INDEX = 11;
var pathMetadata = Environment.GetMetadataPath();
var lines = await File.ReadAllLinesAsync(pathMetadata, Encoding.UTF8);
var currentQdrantVersion = lines[QDRANT_VERSION_INDEX].Trim();

return currentQdrantVersion;
}

private async Task UpdateArchitecture(RID rid)
{
const int ARCHITECTURE_INDEX = 9;
@@ -27,4 +27,6 @@
<s:Boolean x:Key="/Default/UserDictionary/Words/=mime/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=mwais/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=ollama/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=Qdrant/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=qdrant/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/UserDictionary/Words/=tauri_0027s/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>
@@ -5011,9 +5011,15 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1282228996"] = "AI Studio runs w
-- This library is used to read PDF files. This is necessary, e.g., for using PDFs as a data source for a chat.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1388816916"] = "This library is used to read PDF files. This is necessary, e.g., for using PDFs as a data source for a chat."

-- Database version
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1420062548"] = "Database version"

-- This library is used to extend the MudBlazor library. It provides additional components that are not part of the MudBlazor library.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1421513382"] = "This library is used to extend the MudBlazor library. It provides additional components that are not part of the MudBlazor library."

-- Qdrant is a vector database and vector similarity search engine. We use it to realize local RAG—retrieval-augmented generation—within AI Studio. Thanks for the effort and great work that has been and is being put into Qdrant.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1619832053"] = "Qdrant is a vector database and vector similarity search engine. We use it to realize local RAG—retrieval-augmented generation—within AI Studio. Thanks for the effort and great work that has been and is being put into Qdrant."

-- We use Lua as the language for plugins. Lua-CSharp lets Lua scripts communicate with AI Studio and vice versa. Thank you, Yusuke Nakada, for this great library.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T162898512"] = "We use Lua as the language for plugins. Lua-CSharp lets Lua scripts communicate with AI Studio and vice versa. Thank you, Yusuke Nakada, for this great library."

@@ -5050,6 +5056,9 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1924365263"] = "This library is
-- We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1943216839"] = "We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust."

-- Copies the following to the clipboard
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2029659664"] = "Copies the following to the clipboard"

-- Copies the server URL to the clipboard
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2037899437"] = "Copies the server URL to the clipboard"

@@ -5095,6 +5104,9 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2557066213"] = "Used Open Source
-- Build time
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T260228112"] = "Build time"

-- This library is used to create temporary folders for saving the certificate and private key for communication with Qdrant.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2619858133"] = "This library is used to create temporary folders for saving the certificate and private key for communication with Qdrant."

-- This crate provides derive macros for Rust enums, which we use to reduce boilerplate when implementing string conversions and metadata for runtime types. This is helpful for the communication between our Rust and .NET systems.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2635482790"] = "This crate provides derive macros for Rust enums, which we use to reduce boilerplate when implementing string conversions and metadata for runtime types. This is helpful for the communication between our Rust and .NET systems."

@@ -5212,6 +5224,9 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T566998575"] = "This is a library
-- Used .NET SDK
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T585329785"] = "Used .NET SDK"

-- This library is used to manage sidecar processes and to ensure that stale or zombie sidecars are detected and terminated.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T633932150"] = "This library is used to manage sidecar processes and to ensure that stale or zombie sidecars are detected and terminated."

-- Did you find a bug or are you experiencing issues? Report your concern here.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T639371534"] = "Did you find a bug or are you experiencing issues? Report your concern here."
@@ -52,6 +52,7 @@
<PackageReference Include="Microsoft.Extensions.FileProviders.Embedded" Version="9.0.12" />
<PackageReference Include="MudBlazor" Version="8.15.0" />
<PackageReference Include="MudBlazor.Markdown" Version="8.11.0" />
<PackageReference Include="Qdrant.Client" Version="1.16.1" />
<PackageReference Include="ReverseMarkdown" Version="5.0.0" />
<PackageReference Include="LuaCSharp" Version="0.5.3" />
</ItemGroup>
@@ -81,6 +82,7 @@
<MetaAppCommitHash>$([System.String]::Copy( $(Metadata) ).Split( ';' )[ 8 ])</MetaAppCommitHash>
<MetaArchitecture>$([System.String]::Copy( $(Metadata) ).Split( ';' )[ 9 ])</MetaArchitecture>
<MetaPdfiumVersion>$([System.String]::Copy( $(Metadata) ).Split( ';' )[ 10 ])</MetaPdfiumVersion>
<MetaQdrantVersion>$([System.String]::Copy( $(Metadata) ).Split( ';' )[ 11 ])</MetaQdrantVersion>

<GenerateAssemblyInfo>true</GenerateAssemblyInfo>

@@ -108,6 +110,9 @@
<AssemblyAttribute Include="AIStudio.Tools.Metadata.MetaDataLibraries">
<_Parameter1>$(MetaPdfiumVersion)</_Parameter1>
</AssemblyAttribute>
<AssemblyAttribute Include="AIStudio.Tools.Metadata.MetaDataDatabases">
<_Parameter1>$(MetaQdrantVersion)</_Parameter1>
</AssemblyAttribute>
</ItemGroup>

</Target>
@@ -19,6 +19,29 @@
<MudListItem T="string" Icon="@Icons.Material.Outlined.Build" Text="@this.VersionDotnetSdk"/>
<MudListItem T="string" Icon="@Icons.Material.Outlined.Memory" Text="@this.VersionDotnetRuntime"/>
<MudListItem T="string" Icon="@Icons.Material.Outlined.Build" Text="@this.VersionRust"/>
<MudListItem T="string" Icon="@Icons.Material.Outlined.Storage">
<MudText Typo="Typo.body1">
@this.VersionDatabase
</MudText>
<MudCollapse Expanded="@showDatabaseDetails">
<MudText Typo="Typo.body1" Class="mt-2 mb-2">
@foreach (var item in this.databaseDisplayInfo)
{
<div style="display: flex; align-items: center; gap: 8px;">
<MudIcon Icon="@Icons.Material.Filled.ArrowRightAlt"/>
<span>@item.Label: @item.Value</span>
<MudCopyClipboardButton TooltipMessage="@(T("Copies the following to the clipboard")+": "+item.Value)" StringContent=@item.Value/>
</div>
}
</MudText>
</MudCollapse>
<MudButton StartIcon="@(this.showDatabaseDetails ? Icons.Material.Filled.ExpandLess : Icons.Material.Filled.ExpandMore)"
Size="Size.Small"
Variant="Variant.Text"
OnClick="@this.ToggleDatabaseDetails">
@(this.showDatabaseDetails ? T("Hide Details") : T("Show Details"))
</MudButton>
</MudListItem>
<MudListItem T="string" Icon="@Icons.Material.Outlined.DocumentScanner" Text="@this.VersionPdfium"/>
<MudListItem T="string" Icon="@Icons.Material.Outlined.Article" Text="@this.versionPandoc"/>
<MudListItem T="string" Icon="@Icons.Material.Outlined.Widgets" Text="@MudBlazorVersion"/>
@@ -194,6 +217,7 @@
<ThirdPartyComponent Name="CodeBeam.MudBlazor.Extensions" Developer="Mehmet Can Karagöz & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/CodeBeamOrg/CodeBeam.MudBlazor.Extensions/blob/dev/LICENSE" RepositoryUrl="https://github.com/CodeBeamOrg/CodeBeam.MudBlazor.Extensions" UseCase="@T("This library is used to extend the MudBlazor library. It provides additional components that are not part of the MudBlazor library.")"/>
<ThirdPartyComponent Name="Rust" Developer="Graydon Hoare, Rust Foundation, Rust developers & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/rust-lang/rust/blob/master/LICENSE-MIT" RepositoryUrl="https://github.com/rust-lang/rust" UseCase="@T("The .NET backend cannot be started as a desktop app. Therefore, I use a second backend in Rust, which I call runtime. With Rust as the runtime, Tauri can be used to realize a typical desktop app. Thanks to Rust, this app can be offered for Windows, macOS, and Linux desktops. Rust is a great language for developing safe and high-performance software.")"/>
<ThirdPartyComponent Name="Tauri" Developer="Daniel Thompson-Yvetot, Lucas Nogueira, Tensor, Boscop, Serge Zaitsev, George Burton & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/tauri-apps/tauri/blob/dev/LICENSE_MIT" RepositoryUrl="https://github.com/tauri-apps/tauri" UseCase="@T("Tauri is used to host the Blazor user interface. It is a great project that allows the creation of desktop applications using web technologies. I love Tauri!")"/>
<ThirdPartyComponent Name="Qdrant" Developer="Andrey Vasnetsov, Tim Visée, Arnaud Gourlay, Luis Cossío, Ivan Pleshkov, Roman Titov, xzfc, JojiiOfficial & Open Source Community" LicenseName="Apache-2.0" LicenseUrl="https://github.com/qdrant/qdrant/blob/master/LICENSE" RepositoryUrl="https://github.com/qdrant/qdrant" UseCase="@T("Qdrant is a vector database and vector similarity search engine. We use it to realize local RAG—retrieval-augmented generation—within AI Studio. Thanks for the effort and great work that has been and is being put into Qdrant.")"/>
<ThirdPartyComponent Name="Rocket" Developer="Sergio Benitez & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/rwf2/Rocket/blob/master/LICENSE-MIT" RepositoryUrl="https://github.com/rwf2/Rocket" UseCase="@T("We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust.")"/>
<ThirdPartyComponent Name="serde" Developer="Erick Tryzelaar, David Tolnay & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/serde-rs/serde/blob/master/LICENSE-MIT" RepositoryUrl="https://github.com/serde-rs/serde" UseCase="@T("Now we have multiple systems, some developed in .NET and others in Rust. The data format JSON is responsible for translating data between both worlds (called data serialization and deserialization). Serde takes on this task in the Rust world. The counterpart in the .NET world is an integral part of .NET and is located in System.Text.Json.")"/>
<ThirdPartyComponent Name="strum_macros" Developer="Peter Glotfelty & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/Peternator7/strum/blob/master/LICENSE" RepositoryUrl="https://github.com/Peternator7/strum" UseCase="@T("This crate provides derive macros for Rust enums, which we use to reduce boilerplate when implementing string conversions and metadata for runtime types. This is helpful for the communication between our Rust and .NET systems.")"/>
@@ -214,6 +238,8 @@
<ThirdPartyComponent Name="PDFium" Developer="Lei Zhang, Tom Sepez, Dan Sinclair, and Foxit, Google, Chromium, Collabora, Ada, DocsCorp, Dropbox, Microsoft, and PSPDFKit Teams & Open Source Community" LicenseName="Apache-2.0" LicenseUrl="https://pdfium.googlesource.com/pdfium/+/refs/heads/main/LICENSE" RepositoryUrl="https://pdfium.googlesource.com/pdfium" UseCase="@T("This library is used to read PDF files. This is necessary, e.g., for using PDFs as a data source for a chat.")"/>
<ThirdPartyComponent Name="pdfium-render" Developer="Alastair Carey, Dorian Rudolph & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/ajrcarey/pdfium-render/blob/master/LICENSE.md" RepositoryUrl="https://github.com/ajrcarey/pdfium-render" UseCase="@T("This library is used to read PDF files. This is necessary, e.g., for using PDFs as a data source for a chat.")"/>
<ThirdPartyComponent Name="sys-locale" Developer="1Password Team, ComplexSpaces & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/1Password/sys-locale/blob/main/LICENSE-MIT" RepositoryUrl="https://github.com/1Password/sys-locale" UseCase="@T("This library is used to determine the language of the operating system. This is necessary to set the language of the user interface.")"/>
<ThirdPartyComponent Name="sysinfo" Developer="Guillaume Gomez & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/GuillaumeGomez/sysinfo/blob/main/LICENSE" RepositoryUrl="https://github.com/GuillaumeGomez/sysinfo" UseCase="@T("This library is used to manage sidecar processes and to ensure that stale or zombie sidecars are detected and terminated.")"/>
<ThirdPartyComponent Name="tempfile" Developer="Steven Allen, Ashley Mannix & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/Stebalien/tempfile/blob/master/LICENSE-MIT" RepositoryUrl="https://github.com/Stebalien/tempfile" UseCase="@T("This library is used to create temporary folders for saving the certificate and private key for communication with Qdrant.")"/>
<ThirdPartyComponent Name="Lua-CSharp" Developer="Yusuke Nakada & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/nuskey8/Lua-CSharp/blob/main/LICENSE" RepositoryUrl="https://github.com/nuskey8/Lua-CSharp" UseCase="@T("We use Lua as the language for plugins. Lua-CSharp lets Lua scripts communicate with AI Studio and vice versa. Thank you, Yusuke Nakada, for this great library.")" />
<ThirdPartyComponent Name="HtmlAgilityPack" Developer="ZZZ Projects & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/zzzprojects/html-agility-pack/blob/master/LICENSE" RepositoryUrl="https://github.com/zzzprojects/html-agility-pack" UseCase="@T("We use the HtmlAgilityPack to extract content from the web. This is necessary, e.g., when you provide a URL as input for an assistant.")"/>
<ThirdPartyComponent Name="ReverseMarkdown" Developer="Babu Annamalai & Open Source Community" LicenseName="MIT" LicenseUrl="https://github.com/mysticmind/reversemarkdown-net/blob/master/LICENSE" RepositoryUrl="https://github.com/mysticmind/reversemarkdown-net" UseCase="@T("This library is used to convert HTML to Markdown. This is necessary, e.g., when you provide a URL as input for an assistant.")"/>
@@ -2,6 +2,7 @@ using System.Reflection;

using AIStudio.Components;
using AIStudio.Dialogs;
using AIStudio.Tools.Databases;
using AIStudio.Tools.Metadata;
using AIStudio.Tools.PluginSystem;
using AIStudio.Tools.Rust;
@@ -26,10 +27,14 @@ public partial class Information : MSGComponentBase
[Inject]
private ISnackbar Snackbar { get; init; } = null!;

[Inject]
private DatabaseClient DatabaseClient { get; init; } = null!;

private static readonly Assembly ASSEMBLY = Assembly.GetExecutingAssembly();
private static readonly MetaDataAttribute META_DATA = ASSEMBLY.GetCustomAttribute<MetaDataAttribute>()!;
private static readonly MetaDataArchitectureAttribute META_DATA_ARCH = ASSEMBLY.GetCustomAttribute<MetaDataArchitectureAttribute>()!;
private static readonly MetaDataLibrariesAttribute META_DATA_LIBRARIES = ASSEMBLY.GetCustomAttribute<MetaDataLibrariesAttribute>()!;
private static readonly MetaDataDatabasesAttribute META_DATA_DATABASES = ASSEMBLY.GetCustomAttribute<MetaDataDatabasesAttribute>()!;

private static string TB(string fallbackEN) => I18N.I.T(fallbackEN, typeof(Information).Namespace, nameof(Information));

@@ -53,6 +58,8 @@ public partial class Information : MSGComponentBase

private string VersionPdfium => $"{T("Used PDFium version")}: v{META_DATA_LIBRARIES.PdfiumVersion}";

private string VersionDatabase => $"{T("Database version")}: {this.DatabaseClient.Name} v{META_DATA_DATABASES.DatabaseVersion}";

private string versionPandoc = TB("Determine Pandoc version, please wait...");
private PandocInstallation pandocInstallation;

@@ -60,8 +67,14 @@ public partial class Information : MSGComponentBase

private bool showEnterpriseConfigDetails;

private bool showDatabaseDetails;

private IPluginMetadata? configPlug = PluginFactory.AvailablePlugins.FirstOrDefault(x => x.Type is PluginType.CONFIGURATION);

private sealed record DatabaseDisplayInfo(string Label, string Value);

private readonly List<DatabaseDisplayInfo> databaseDisplayInfo = new();

/// <summary>
/// Determines whether the enterprise configuration has details that can be shown/hidden.
/// Returns true if there are details available, false otherwise.
@@ -96,6 +109,11 @@ public partial class Information : MSGComponentBase
this.osLanguage = await this.RustService.ReadUserLanguage();
this.logPaths = await this.RustService.GetLogPaths();

await foreach (var (label, value) in this.DatabaseClient.GetDisplayInfo())
{
this.databaseDisplayInfo.Add(new DatabaseDisplayInfo(label, value));
}

// Determine the Pandoc version may take some time, so we start it here
// without waiting for the result:
_ = this.DeterminePandocVersion();
@@ -171,6 +189,11 @@ public partial class Information : MSGComponentBase
this.showEnterpriseConfigDetails = !this.showEnterpriseConfigDetails;
}

private void ToggleDatabaseDetails()
{
this.showDatabaseDetails = !this.showDatabaseDetails;
}

private async Task CopyStartupLogPath()
{
await this.RustService.CopyText2Clipboard(this.Snackbar, this.logPaths.LogStartupPath);
@@ -5013,9 +5013,15 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1282228996"] = "AI Studio läuft
-- This library is used to read PDF files. This is necessary, e.g., for using PDFs as a data source for a chat.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1388816916"] = "Diese Bibliothek wird verwendet, um PDF-Dateien zu lesen. Das ist zum Beispiel notwendig, um PDFs als Datenquelle für einen Chat zu nutzen."

-- Database version
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1420062548"] = "Datenbankversion"

-- This library is used to extend the MudBlazor library. It provides additional components that are not part of the MudBlazor library.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1421513382"] = "Diese Bibliothek wird verwendet, um die MudBlazor-Bibliothek zu erweitern. Sie stellt zusätzliche Komponenten bereit, die nicht Teil der MudBlazor-Bibliothek sind."

-- Qdrant is a vector database and vector similarity search engine. We use it to realize local RAG—retrieval-augmented generation—within AI Studio. Thanks for the effort and great work that has been and is being put into Qdrant.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1619832053"] = "Qdrant ist eine Vektordatenbank und Suchmaschine für Vektoren. Wir nutzen Qdrant, um lokales RAG (Retrieval-Augmented Generation) innerhalb von AI Studio zu realisieren. Vielen Dank für den Einsatz und die großartige Arbeit, die in Qdrant gesteckt wurde und weiterhin gesteckt wird."

-- We use Lua as the language for plugins. Lua-CSharp lets Lua scripts communicate with AI Studio and vice versa. Thank you, Yusuke Nakada, for this great library.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T162898512"] = "Wir verwenden Lua als Sprache für Plugins. Lua-CSharp ermöglicht die Kommunikation zwischen Lua-Skripten und AI Studio in beide Richtungen. Vielen Dank an Yusuke Nakada für diese großartige Bibliothek."

@@ -5052,6 +5058,9 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1924365263"] = "Diese Bibliothek
-- We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1943216839"] = "Wir verwenden Rocket zur Implementierung der Runtime-API. Dies ist notwendig, da die Runtime mit der Benutzeroberfläche (IPC) kommunizieren muss. Rocket ist ein ausgezeichnetes Framework zur Umsetzung von Web-APIs in Rust."

-- Copies the following to the clipboard
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2029659664"] = "Kopiert Folgendes in die Zwischenablage"

-- Copies the server URL to the clipboard
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2037899437"] = "Kopiert die Server-URL in die Zwischenablage"

@@ -5097,6 +5106,9 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2557066213"] = "Verwendete Open-
-- Build time
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T260228112"] = "Build-Zeit"

-- This library is used to create temporary folders for saving the certificate and private key for communication with Qdrant.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2619858133"] = "Diese Bibliothek wird verwendet, um temporäre Ordner zu erstellen, in denen das Zertifikat und der private Schlüssel für die Kommunikation mit Qdrant gespeichert werden."

-- This crate provides derive macros for Rust enums, which we use to reduce boilerplate when implementing string conversions and metadata for runtime types. This is helpful for the communication between our Rust and .NET systems.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2635482790"] = "Dieses Crate stellt Derive-Makros für Rust-Enums bereit, die wir verwenden, um Boilerplate zu reduzieren, wenn wir String-Konvertierungen und Metadaten für Laufzeittypen implementieren. Das ist hilfreich für die Kommunikation zwischen unseren Rust- und .NET-Systemen."

@@ -5214,6 +5226,9 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T566998575"] = "Dies ist eine Bib
-- Used .NET SDK
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T585329785"] = "Verwendetes .NET SDK"

-- This library is used to manage sidecar processes and to ensure that stale or zombie sidecars are detected and terminated.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T633932150"] = "Diese Bibliothek wird verwendet, um Sidecar-Prozesse zu verwalten und sicherzustellen, dass veraltete oder Zombie-Sidecars erkannt und beendet werden."

-- Did you find a bug or are you experiencing issues? Report your concern here.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T639371534"] = "Haben Sie einen Fehler gefunden oder Probleme festgestellt? Melden Sie Ihr Anliegen hier."
@@ -5013,9 +5013,15 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1282228996"] = "AI Studio runs w
-- This library is used to read PDF files. This is necessary, e.g., for using PDFs as a data source for a chat.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1388816916"] = "This library is used to read PDF files. This is necessary, e.g., for using PDFs as a data source for a chat."

-- Database version
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1420062548"] = "Database version"

-- This library is used to extend the MudBlazor library. It provides additional components that are not part of the MudBlazor library.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1421513382"] = "This library is used to extend the MudBlazor library. It provides additional components that are not part of the MudBlazor library."

-- Qdrant is a vector database and vector similarity search engine. We use it to realize local RAG—retrieval-augmented generation—within AI Studio. Thanks for the effort and great work that has been and is being put into Qdrant.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1619832053"] = "Qdrant is a vector database and vector similarity search engine. We use it to realize local RAG -— retrieval-augmented generation -— within AI Studio. Thanks for the effort and great work that has been and is being put into Qdrant."

-- We use Lua as the language for plugins. Lua-CSharp lets Lua scripts communicate with AI Studio and vice versa. Thank you, Yusuke Nakada, for this great library.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T162898512"] = "We use Lua as the language for plugins. Lua-CSharp lets Lua scripts communicate with AI Studio and vice versa. Thank you, Yusuke Nakada, for this great library."

@@ -5052,6 +5058,9 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1924365263"] = "This library is
-- We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T1943216839"] = "We use Rocket to implement the runtime API. This is necessary because the runtime must be able to communicate with the user interface (IPC). Rocket is a great framework for implementing web APIs in Rust."

-- Copies the following to the clipboard
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2029659664"] = "Copies the following to the clipboard"

-- Copies the server URL to the clipboard
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2037899437"] = "Copies the server URL to the clipboard"

@@ -5097,6 +5106,9 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2557066213"] = "Used Open Source
-- Build time
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T260228112"] = "Build time"

-- This library is used to create temporary folders for saving the certificate and private key for communication with Qdrant.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2619858133"] = "This library is used to create temporary folders for saving the certificate and private key for communication with Qdrant."

-- This crate provides derive macros for Rust enums, which we use to reduce boilerplate when implementing string conversions and metadata for runtime types. This is helpful for the communication between our Rust and .NET systems.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T2635482790"] = "This crate provides derive macros for Rust enums, which we use to reduce boilerplate when implementing string conversions and metadata for runtime types. This is helpful for the communication between our Rust and .NET systems."

@@ -5214,6 +5226,9 @@ UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T566998575"] = "This is a library
-- Used .NET SDK
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T585329785"] = "Used .NET SDK"

-- This library is used to manage sidecar processes and to ensure that stale or zombie sidecars are detected and terminated.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T633932150"] = "This library is used to manage sidecar processes and to ensure that stale or zombie sidecars are detected and terminated."

-- Did you find a bug or are you experiencing issues? Report your concern here.
UI_TEXT_CONTENT["AISTUDIO::PAGES::INFORMATION::T639371534"] = "Did you find a bug or are you experiencing issues? Report your concern here."
@@ -1,5 +1,7 @@
using AIStudio.Agents;
using AIStudio.Settings;
using AIStudio.Tools.Databases;
using AIStudio.Tools.Databases.Qdrant;
using AIStudio.Tools.PluginSystem;
using AIStudio.Tools.Services;

@@ -24,6 +26,7 @@ internal sealed class Program
public static string API_TOKEN = null!;
public static IServiceProvider SERVICE_PROVIDER = null!;
public static ILoggerFactory LOGGER_FACTORY = null!;
public static DatabaseClient DATABASE_CLIENT = null!;

public static async Task Main()
{
@@ -82,6 +85,39 @@ internal sealed class Program
return;
}

var qdrantInfo = await rust.GetQdrantInfo();
if (qdrantInfo.Path == string.Empty)
{
Console.WriteLine("Error: Failed to get the Qdrant path from Rust.");
return;
}

if (qdrantInfo.PortHttp == 0)
{
Console.WriteLine("Error: Failed to get the Qdrant HTTP port from Rust.");
return;
}

if (qdrantInfo.PortGrpc == 0)
{
Console.WriteLine("Error: Failed to get the Qdrant gRPC port from Rust.");
return;
}

if (qdrantInfo.Fingerprint == string.Empty)
{
Console.WriteLine("Error: Failed to get the Qdrant fingerprint from Rust.");
return;
}

if (qdrantInfo.ApiToken == string.Empty)
{
Console.WriteLine("Error: Failed to get the Qdrant API token from Rust.");
return;
}

var databaseClient = new QdrantClientImplementation("Qdrant", qdrantInfo.Path, qdrantInfo.PortHttp, qdrantInfo.PortGrpc, qdrantInfo.Fingerprint, qdrantInfo.ApiToken);

var builder = WebApplication.CreateBuilder();
builder.WebHost.ConfigureKestrel(kestrelServerOptions =>
{
@@ -133,6 +169,7 @@ internal sealed class Program
builder.Services.AddHostedService<UpdateService>();
builder.Services.AddHostedService<TemporaryChatService>();
builder.Services.AddHostedService<EnterpriseEnvironmentService>();
builder.Services.AddSingleton<DatabaseClient>(databaseClient);
builder.Services.AddHostedService<GlobalShortcutService>();
builder.Services.AddHostedService<RustAvailabilityMonitorService>();

@@ -192,6 +229,10 @@ internal sealed class Program
RUST_SERVICE = rust;
ENCRYPTION = encryption;

var databaseLogger = app.Services.GetRequiredService<ILogger<DatabaseClient>>();
databaseClient.SetLogger(databaseLogger);
DATABASE_CLIENT = databaseClient;

programLogger.LogInformation("Initialize internal file system.");
app.Use(Redirect.HandlerContentAsync);
app.Use(FileHandler.HandlerAsync);
@@ -228,6 +269,7 @@ internal sealed class Program
await serverTask;

RUST_SERVICE.Dispose();
DATABASE_CLIENT.Dispose();
PluginFactory.Dispose();
programLogger.LogInformation("The AI Studio server was stopped.");
}
52 app/MindWork AI Studio/Tools/Databases/DatabaseClient.cs (new file)
@@ -0,0 +1,52 @@
namespace AIStudio.Tools.Databases;

public abstract class DatabaseClient(string name, string path)
{
public string Name => name;

private string Path => path;

protected ILogger<DatabaseClient>? logger;

public abstract IAsyncEnumerable<(string Label, string Value)> GetDisplayInfo();

public string GetStorageSize()
{
if (string.IsNullOrWhiteSpace(this.Path))
{
this.logger!.LogError($"Error: Database path '{this.Path}' cannot be null or empty.");
return "0 B";
}

if (!Directory.Exists(this.Path))
{
this.logger!.LogError($"Error: Database path '{this.Path}' does not exist.");
return "0 B";
}
var files = Directory.EnumerateFiles(this.Path, "*", SearchOption.AllDirectories)
.Where(file => !System.IO.Path.GetDirectoryName(file)!.Contains("cert", StringComparison.OrdinalIgnoreCase));
var size = files.Sum(file => new FileInfo(file).Length);
return FormatBytes(size);
}

public static string FormatBytes(long size)
{
string[] suffixes = { "B", "KB", "MB", "GB", "TB", "PB" };
int suffixIndex = 0;

while (size >= 1024 && suffixIndex < suffixes.Length - 1)
{
size /= 1024;
suffixIndex++;
}

return $"{size:0##} {suffixes[suffixIndex]}";
}

public void SetLogger(ILogger<DatabaseClient> logService)
{
this.logger = logService;
}

public abstract void Dispose();
}
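A quick illustration (not part of the diff) of how the FormatBytes helper above behaves; note that the integer division truncates, so fractional sizes are rounded down:

// Illustrative only: expected results of the FormatBytes helper shown above.
Console.WriteLine(DatabaseClient.FormatBytes(0));          // "0 B"
Console.WriteLine(DatabaseClient.FormatBytes(2048));       // "2 KB"
Console.WriteLine(DatabaseClient.FormatBytes(5_000_000));  // "4 MB" (integer division truncates)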
@@ -0,0 +1,66 @@
using Qdrant.Client;
using Qdrant.Client.Grpc;

namespace AIStudio.Tools.Databases.Qdrant;

public class QdrantClientImplementation : DatabaseClient
{
private int HttpPort { get; }

private int GrpcPort { get; }

private QdrantClient GrpcClient { get; }

private string Fingerprint { get; }

private string ApiToken { get; }

public QdrantClientImplementation(string name, string path, int httpPort, int grpcPort, string fingerprint, string apiToken): base(name, path)
{
this.HttpPort = httpPort;
this.GrpcPort = grpcPort;
this.Fingerprint = fingerprint;
this.ApiToken = apiToken;
this.GrpcClient = this.CreateQdrantClient();
}

private const string IP_ADDRESS = "localhost";

public QdrantClient CreateQdrantClient()
{
var address = "https://" + IP_ADDRESS + ":" + this.GrpcPort;
var channel = QdrantChannel.ForAddress(address, new ClientConfiguration
{
ApiKey = this.ApiToken,
CertificateThumbprint = this.Fingerprint
});
var grpcClient = new QdrantGrpcClient(channel);
return new QdrantClient(grpcClient);
}

public async Task<string> GetVersion()
{
var operation = await this.GrpcClient.HealthAsync();
return "v"+operation.Version;
}

public async Task<string> GetCollectionsAmount()
{
var operation = await this.GrpcClient.ListCollectionsAsync();
return operation.Count.ToString();
}

public override async IAsyncEnumerable<(string Label, string Value)> GetDisplayInfo()
{
yield return ("HTTP port", this.HttpPort.ToString());
yield return ("gRPC port", this.GrpcPort.ToString());
yield return ("Extracted version", await this.GetVersion());
yield return ("Storage size", $"{base.GetStorageSize()}");
yield return ("Amount of collections", await this.GetCollectionsAmount());
}

public override void Dispose()
{
this.GrpcClient.Dispose();
}
}
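For orientation, a sketch (not part of the diff) of how this class is used elsewhere in the commit: Program.cs constructs it from the QdrantInfo delivered by the Rust runtime, and the Information page iterates GetDisplayInfo. The qdrantInfo variable and the printed port are assumed example values; the loop must run inside an async method:

// Sketch only; mirrors the usage added in Program.cs and Information.razor.cs.
// qdrantInfo comes from RustService.GetQdrantInfo() (see below).
var db = new QdrantClientImplementation("Qdrant", qdrantInfo.Path, qdrantInfo.PortHttp, qdrantInfo.PortGrpc, qdrantInfo.Fingerprint, qdrantInfo.ApiToken);
await foreach (var (label, value) in db.GetDisplayInfo())
    Console.WriteLine($"{label}: {value}"); // e.g., "gRPC port: 6334" (example value)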
@@ -0,0 +1,6 @@
namespace AIStudio.Tools.Metadata;

public class MetaDataDatabasesAttribute(string databaseVersion) : Attribute
{
public string DatabaseVersion => databaseVersion;
}
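The attribute value is injected at build time by the csproj change shown earlier (MetaQdrantVersion, i.e. line 12 of metadata.txt) and read back via reflection. A minimal sketch of the read side, matching what Information.razor.cs does:

// Sketch: reading the database version that the csproj bakes into the assembly.
using System.Reflection;
using AIStudio.Tools.Metadata;

var dbVersion = Assembly.GetExecutingAssembly()
    .GetCustomAttribute<MetaDataDatabasesAttribute>()
    ?.DatabaseVersion; // e.g., "1.16.3", per the metadata.txt change in this commit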
17 app/MindWork AI Studio/Tools/Rust/QdrantInfo.cs (new file)
@@ -0,0 +1,17 @@
namespace AIStudio.Tools.Rust;

/// <summary>
/// The response of the Qdrant information request.
/// </summary>
public readonly record struct QdrantInfo
{
public string Path { get; init; }

public int PortHttp { get; init; }

public int PortGrpc { get; init; }

public string Fingerprint { get; init; }

public string ApiToken { get; init; }
}
@@ -0,0 +1,25 @@
using AIStudio.Tools.Rust;

namespace AIStudio.Tools.Services;

public sealed partial class RustService
{
public async Task<QdrantInfo> GetQdrantInfo()
{
try
{
var cts = new CancellationTokenSource(TimeSpan.FromSeconds(45));
var response = await this.http.GetFromJsonAsync<QdrantInfo>("/system/qdrant/info", this.jsonRustSerializerOptions, cts.Token);
return response;
}
catch (Exception e)
{
if(this.logger is not null)
this.logger.LogError(e, "Error while fetching Qdrant info from Rust service.");
else
Console.WriteLine($"Error while fetching Qdrant info from Rust service: '{e}'.");

return default;
}
}
}
@@ -1 +1,2 @@
# v26.2.2, build 234 (2026-02-xx xx:xx UTC)
- Added a vector database (Qdrant) as a building block for our local RAG (retrieval-augmented generation) solution. Thank you very much, Paul (`PaulKoudelka`), for this major contribution. Note that our local RAG implementation remains in preview and has not yet been released; other building blocks are not yet ready.
@@ -45,7 +45,17 @@ Do you want to test your changes before creating a PR? Follow these steps:
9. Execute the command `dotnet run`.
10. After compiling the .NET code, the app will finally start inside the Tauri runtime window.

You can now test your changes.
You can now test your changes. To stop the application:
- Close the Tauri window (GUI).
- Press ``Ctrl+C`` in the terminal where the app is running.
- Stop the process via your IDE's run/debug controls.

> ⚠️ Important: Stopping the app via ``Ctrl+C`` or the IDE may not terminate the Qdrant sidecar process, especially on Windows. This can lead to startup failures when restarting the app.

If you encounter issues with restarting Tauri, then manually kill the Qdrant process:
- **Linux/macOS:** Run `pkill -f qdrant` in your terminal.
- **Windows:** Open Task Manager → Find `qdrant.exe` → Right-click → "End task".
- Restart your Tauri app.

## Create a release
In order to create a release:
@@ -9,3 +9,4 @@
8f9cd40d060, release
osx-arm64
144.0.7543.0
1.16.3
@@ -39,13 +39,17 @@ pdfium-render = "0.8.37"
sys-locale = "0.3.2"
cfg-if = "1.0.4"
pptx-to-md = "0.4.0"
tempfile = "3.8"
strum_macros = "0.27"
sysinfo = "0.38.0"

# Fixes security vulnerability downstream, where the upstream is not fixed yet:
url = "2.5.8"
ring = "0.17.14"
crossbeam-channel = "0.5.15"
tracing-subscriber = "0.3.22"
tracing-subscriber = "0.3.20"
dirs = "6.0.0"

[target.'cfg(target_os = "linux")'.dependencies]
# See issue https://github.com/tauri-apps/tauri/issues/4470
@ -1,4 +1,4 @@
|
||||
use std::path::PathBuf;
|
||||
use std::path::{PathBuf};
|
||||
|
||||
fn main() {
|
||||
tauri_build::build();
|
||||
|
||||
354
runtime/resources/databases/qdrant/config.yaml
Normal file
@ -0,0 +1,354 @@
|
||||
log_level: INFO
|
||||
|
||||
# Logging configuration
|
||||
# Qdrant logs to stdout. You may configure to also write logs to a file on disk.
|
||||
# Be aware that this file may grow indefinitely.
|
||||
# logger:
|
||||
# # Logging format, supports `text` and `json`
|
||||
# format: text
|
||||
# on_disk:
|
||||
# enabled: true
|
||||
# log_file: path/to/log/file.log
|
||||
# log_level: INFO
|
||||
# # Logging format, supports `text` and `json`
|
||||
# format: text
|
||||
# buffer_size_bytes: 1024
|
||||
|
||||
storage:
|
||||
|
||||
snapshots_config:
|
||||
# "local" or "s3" - where to store snapshots
|
||||
snapshots_storage: local
|
||||
# s3_config:
|
||||
# bucket: ""
|
||||
# region: ""
|
||||
# access_key: ""
|
||||
# secret_key: ""
|
||||
|
||||
# Where to store temporary files
|
||||
# If null, temporary snapshots are stored in: storage/snapshots_temp/
|
||||
temp_path: null
|
||||
|
||||
# If true - point payloads will not be stored in memory.
|
||||
# It will be read from the disk every time it is requested.
|
||||
# This setting saves RAM by (slightly) increasing the response time.
|
||||
# Note: those payload values that are involved in filtering and are indexed - remain in RAM.
|
||||
#
|
||||
# Default: true
|
||||
on_disk_payload: true
|
||||
|
||||
# Maximum number of concurrent updates to shard replicas
|
||||
# If `null` - maximum concurrency is used.
|
||||
update_concurrency: null
|
||||
|
||||
# Write-ahead-log related configuration
|
||||
wal:
|
||||
# Size of a single WAL segment
|
||||
wal_capacity_mb: 32
|
||||
|
||||
# Number of WAL segments to create ahead of actual data requirement
|
||||
wal_segments_ahead: 0
|
||||
|
||||
# Normal node - receives all updates and answers all queries
|
||||
node_type: "Normal"
|
||||
|
||||
# Listener node - receives all updates, but does not answer search/read queries
|
||||
# Useful for setting up a dedicated backup node
|
||||
# node_type: "Listener"
|
||||
|
||||
performance:
|
||||
# Number of parallel threads used for search operations. If 0 - auto selection.
|
||||
max_search_threads: 0
|
||||
|
||||
# CPU budget, how many CPUs (threads) to allocate for an optimization job.
|
||||
# If 0 - auto selection, keep 1 or more CPUs unallocated depending on CPU size
|
||||
# If negative - subtract this number of CPUs from the available CPUs.
|
||||
# If positive - use this exact number of CPUs.
|
||||
optimizer_cpu_budget: 0
|
||||
|
||||
# Prevent DDoS of too many concurrent updates in distributed mode.
|
||||
# One external update usually triggers multiple internal updates, which breaks internal
|
||||
# timings. For example, the health check timing and consensus timing.
|
||||
# If null - auto selection.
|
||||
update_rate_limit: null
|
||||
|
||||
# Limit for number of incoming automatic shard transfers per collection on this node, does not affect user-requested transfers.
|
||||
# The same value should be used on all nodes in a cluster.
|
||||
# Default is to allow 1 transfer.
|
||||
# If null - allow unlimited transfers.
|
||||
#incoming_shard_transfers_limit: 1
|
||||
|
||||
# Limit for number of outgoing automatic shard transfers per collection on this node, does not affect user-requested transfers.
|
||||
# The same value should be used on all nodes in a cluster.
|
||||
# Default is to allow 1 transfer.
|
||||
# If null - allow unlimited transfers.
|
||||
#outgoing_shard_transfers_limit: 1
|
||||
|
||||
# Enable async scorer which uses io_uring when rescoring.
|
||||
# Only supported on Linux, must be enabled in your kernel.
|
||||
# See: <https://qdrant.tech/articles/io_uring/#and-what-about-qdrant>
|
||||
#async_scorer: false
|
||||
|
||||
optimizers:
|
||||
# The minimal fraction of deleted vectors in a segment, required to perform segment optimization
|
||||
deleted_threshold: 0.2
|
||||
|
||||
# The minimal number of vectors in a segment, required to perform segment optimization
|
||||
vacuum_min_vector_number: 1000
|
||||
|
||||
# Target amount of segments optimizer will try to keep.
|
||||
# Real amount of segments may vary depending on multiple parameters:
|
||||
# - Amount of stored points
|
||||
# - Current write RPS
|
||||
#
|
||||
# It is recommended to select default number of segments as a factor of the number of search threads,
|
||||
# so that each segment would be handled evenly by one of the threads.
|
||||
# If `default_segment_number = 0`, will be automatically selected by the number of available CPUs
|
||||
default_segment_number: 0
|
||||
|
||||
# Do not create segments larger than this size (in KiloBytes).
|
||||
# Large segments might require disproportionately long indexation times,
|
||||
# therefore it makes sense to limit the size of segments.
|
||||
#
|
||||
# If indexing speed has higher priority for you - make this parameter lower.
|
||||
# If search speed is more important - make this parameter higher.
|
||||
# Note: 1Kb = 1 vector of size 256
|
||||
# If not set, will be automatically selected considering the number of available CPUs.
|
||||
max_segment_size_kb: null
|
||||
|
||||
# Maximum size (in KiloBytes) of vectors allowed for plain index.
|
||||
# Default value based on experiments and observations.
|
||||
# Note: 1Kb = 1 vector of size 256
|
||||
# To explicitly disable vector indexing, set to `0`.
|
||||
# If not set, the default value will be used.
|
||||
indexing_threshold_kb: 10000
|
||||
|
||||
# Interval between forced flushes.
|
||||
flush_interval_sec: 5
|
||||
|
||||
# Max number of threads (jobs) for running optimizations per shard.
|
||||
# Note: each optimization job will also use `max_indexing_threads` threads by itself for index building.
|
||||
# If null - have no limit and choose dynamically to saturate CPU.
|
||||
# If 0 - no optimization threads, optimizations will be disabled.
|
||||
max_optimization_threads: null
|
||||
|
||||
# This section has the same options as 'optimizers' above. All values specified here will overwrite the collections
|
||||
# optimizers configs regardless of the config above and the options specified at collection creation.
|
||||
#optimizers_overwrite:
|
||||
# deleted_threshold: 0.2
|
||||
# vacuum_min_vector_number: 1000
|
||||
# default_segment_number: 0
|
||||
# max_segment_size_kb: null
|
||||
# indexing_threshold_kb: 10000
|
||||
# flush_interval_sec: 5
|
||||
# max_optimization_threads: null
|
||||
|
||||
# Default parameters of HNSW Index. Could be overridden for each collection or named vector individually
|
||||
hnsw_index:
|
||||
# Number of edges per node in the index graph. Larger the value - more accurate the search, more space required.
|
||||
m: 16
|
||||
|
||||
# Number of neighbours to consider during the index building. Larger the value - more accurate the search, more time required to build index.
|
||||
ef_construct: 100
|
||||
|
||||
# Minimal size threshold (in KiloBytes) below which full-scan is preferred over HNSW search.
|
||||
# This measures the total size of vectors being queried against.
|
||||
# When the maximum estimated amount of points that a condition satisfies is smaller than
|
||||
# `full_scan_threshold_kb`, the query planner will use full-scan search instead of HNSW index
|
||||
# traversal for better performance.
|
||||
# Note: 1Kb = 1 vector of size 256
|
||||
full_scan_threshold_kb: 10000
|
||||
|
||||
# Number of parallel threads used for background index building.
|
||||
# If 0 - automatically select.
|
||||
# Best to keep between 8 and 16 to prevent likelihood of building broken/inefficient HNSW graphs.
|
||||
# On small CPUs, fewer threads are used.
|
||||
max_indexing_threads: 0
|
||||
|
||||
# Store HNSW index on disk. If set to false, index will be stored in RAM. Default: false
|
||||
on_disk: false
|
||||
|
||||
# Custom M param for hnsw graph built for payload index. If not set, default M will be used.
|
||||
payload_m: null
|
||||
|
||||
# Default shard transfer method to use if none is defined.
|
||||
# If null - don't have a shard transfer preference, choose automatically.
|
||||
# If stream_records, snapshot or wal_delta - prefer this specific method.
|
||||
# More info: https://qdrant.tech/documentation/guides/distributed_deployment/#shard-transfer-method
|
||||
shard_transfer_method: null
|
||||
|
||||
# Default parameters for collections
|
||||
collection:
|
||||
# Number of replicas of each shard that network tries to maintain
|
||||
replication_factor: 1
|
||||
|
||||
# How many replicas should apply the operation for us to consider it successful
|
||||
write_consistency_factor: 1
|
||||
|
||||
# Default parameters for vectors.
|
||||
vectors:
|
||||
# Whether vectors should be stored in memory or on disk.
|
||||
on_disk: null
|
||||
|
||||
# shard_number_per_node: 1
|
||||
|
||||
# Default quantization configuration.
|
||||
# More info: https://qdrant.tech/documentation/guides/quantization
|
||||
quantization: null
|
||||
|
||||
# Default strict mode parameters for newly created collections.
|
||||
#strict_mode:
|
||||
# Whether strict mode is enabled for a collection or not.
|
||||
#enabled: false
|
||||
|
||||
# Max allowed `limit` parameter for all APIs that don't have their own max limit.
|
||||
#max_query_limit: null
|
||||
|
||||
# Max allowed `timeout` parameter.
|
||||
#max_timeout: null
|
||||
|
||||
# Allow usage of unindexed fields in retrieval based (eg. search) filters.
|
||||
#unindexed_filtering_retrieve: null
|
||||
|
||||
# Allow usage of unindexed fields in filtered updates (eg. delete by payload).
|
||||
#unindexed_filtering_update: null
|
||||
|
||||
# Max HNSW value allowed in search parameters.
|
||||
#search_max_hnsw_ef: null
|
||||
|
||||
# Whether exact search is allowed or not.
|
||||
#search_allow_exact: null
|
||||
|
||||
# Max oversampling value allowed in search.
|
||||
#search_max_oversampling: null
|
||||
|
||||
# Maximum number of collections allowed to be created
|
||||
# If null - no limit.
|
||||
max_collections: null
|
||||
|
||||
service:
|
||||
# Maximum size of POST data in a single request in megabytes
|
||||
max_request_size_mb: 32
|
||||
|
||||
# Number of parallel workers used for serving the api. If 0 - equal to the number of available cores.
|
||||
# If missing - Same as storage.max_search_threads
|
||||
max_workers: 0
|
||||
|
||||
# Host to bind the service on
|
||||
host: 127.0.0.1
|
||||
|
||||
# HTTP(S) port to bind the service on
|
||||
# http_port: 6333
|
||||
|
||||
# gRPC port to bind the service on.
|
||||
# If `null` - gRPC is disabled. Default: null
|
||||
# Comment to disable gRPC:
|
||||
# grpc_port: 6334
|
||||
|
||||
# Enable CORS headers in REST API.
|
||||
# If enabled, browsers would be allowed to query REST endpoints regardless of query origin.
|
||||
# More info: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS
|
||||
# Default: true
|
||||
enable_cors: false
|
||||
|
||||
# Enable HTTPS for the REST and gRPC API
|
||||
# TLS is enabled in AI Studio through environment variables when instantiating Qdrant as a sidecar.
|
||||
# enable_tls: false
|
||||
|
||||
# Check user HTTPS client certificate against CA file specified in tls config
|
||||
verify_https_client_certificate: false
|
||||
|
||||
# Set an api-key.
|
||||
# If set, all requests must include a header with the api-key.
|
||||
# example header: `api-key: <API-KEY>`
|
||||
#
|
||||
# If you enable this you should also enable TLS.
|
||||
# (Either above or via an external service like nginx.)
|
||||
# Sending an api-key over an unencrypted channel is insecure.
|
||||
#
|
||||
# Uncomment to enable.
|
||||
# api_key: your_secret_api_key_here
|
||||
|
||||
# Set an api-key for read-only operations.
|
||||
# If set, all requests must include a header with the api-key.
|
||||
# example header: `api-key: <API-KEY>`
|
||||
#
|
||||
# If you enable this you should also enable TLS.
|
||||
# (Either above or via an external service like nginx.)
|
||||
# Sending an api-key over an unencrypted channel is insecure.
|
||||
#
|
||||
# Uncomment to enable.
|
||||
# read_only_api_key: your_secret_read_only_api_key_here
|
||||
|
||||
# Uncomment to enable JWT Role Based Access Control (RBAC).
|
||||
# If enabled, you can generate JWT tokens with fine-grained rules for access control.
|
||||
# Use generated token instead of API key.
|
||||
#
|
||||
# jwt_rbac: true
|
||||
|
||||
# Hardware reporting adds information to the API responses with a
|
||||
# hint on how many resources were used to execute the request.
|
||||
#
|
||||
# Warning: experimental, this feature is still under development and is not supported yet.
|
||||
#
|
||||
# Uncomment to enable.
|
||||
# hardware_reporting: true
|
||||
#
|
||||
# Uncomment to enable.
|
||||
# Prefix for the names of metrics in the /metrics API.
|
||||
# metrics_prefix: qdrant_
|
||||
|
||||
cluster:
|
||||
# Use `enabled: true` to run Qdrant in distributed deployment mode
|
||||
enabled: false
|
||||
|
||||
# Configuration of the inter-cluster communication
|
||||
p2p:
|
||||
# Port for internal communication between peers
|
||||
port: 6335
|
||||
|
||||
# Use TLS for communication between peers
|
||||
enable_tls: false
|
||||
|
||||
# Configuration related to distributed consensus algorithm
|
||||
consensus:
|
||||
# How frequently peers should ping each other.
|
||||
# Setting this parameter to lower value will allow consensus
|
||||
# to detect disconnected nodes earlier, but too frequent
|
||||
# tick period may create significant network and CPU overhead.
|
||||
# We encourage you NOT to change this parameter unless you know what you are doing.
|
||||
tick_period_ms: 100
|
||||
|
||||
# Compact consensus operations once we have this amount of applied
|
||||
# operations. Allows peers to join quickly with a consensus snapshot without
|
||||
# replaying a huge amount of operations.
|
||||
# If 0 - disable compaction
|
||||
compact_wal_entries: 128
|
||||
|
||||
# Set to true to prevent service from sending usage statistics to the developers.
|
||||
# Read more: https://qdrant.tech/documentation/guides/telemetry
|
||||
telemetry_disabled: true
|
||||
|
||||
# TLS configuration.
|
||||
# Required if either service.enable_tls or cluster.p2p.enable_tls is true.
|
||||
tls:
|
||||
# Server certificate chain file
|
||||
# cert: ./tls/cert.pem
|
||||
|
||||
# Server private key file
|
||||
# key: ./tls/key.pem
|
||||
|
||||
# Certificate authority certificate file.
|
||||
# This certificate will be used to validate the certificates
|
||||
# presented by other nodes during inter-cluster communication.
|
||||
#
|
||||
# If verify_https_client_certificate is true, it will verify
|
||||
# HTTPS client certificate
|
||||
#
|
||||
# Required if cluster.p2p.enable_tls is true.
|
||||
ca_cert: ./tls/cacert.pem
|
||||
|
||||
# TTL in seconds to reload certificate from disk, useful for certificate rotations.
|
||||
# Only works for HTTPS endpoints. Does not support gRPC (and intra-cluster communication).
|
||||
# If `null` - TTL is disabled.
|
||||
cert_ttl: 3600
|
||||
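A note on how this config file is consumed (this paragraph and the sketch below are not part of the diff): the file above only ships conservative defaults. At runtime, AI Studio overrides individual keys through environment variables using Qdrant's `QDRANT__SECTION__KEY` naming convention, as seen later in `runtime/src/qdrant.rs`. A minimal sketch of that mapping, with the port values passed in as illustrative parameters:

```rust
// Sketch only: mirrors the override convention used in qdrant.rs.
// A config key such as `service.http_port` is overridden by the
// environment variable QDRANT__SERVICE__HTTP_PORT.
use std::collections::HashMap;

fn qdrant_env_overrides(http_port: u16, grpc_port: u16) -> HashMap<String, String> {
    HashMap::from_iter([
        (String::from("QDRANT__SERVICE__HTTP_PORT"), http_port.to_string()),
        (String::from("QDRANT__SERVICE__GRPC_PORT"), grpc_port.to_string()),
        (String::from("QDRANT__SERVICE__ENABLE_TLS"), String::from("true")),
    ])
}
```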
@ -1,21 +1,5 @@
|
||||
use log::info;
|
||||
use once_cell::sync::Lazy;
|
||||
use rand::{RngCore, SeedableRng};
|
||||
use rocket::http::Status;
|
||||
use rocket::Request;
|
||||
use rocket::request::FromRequest;
|
||||
|
||||
/// The API token used to authenticate requests.
|
||||
pub static API_TOKEN: Lazy<APIToken> = Lazy::new(|| {
|
||||
let mut token = [0u8; 32];
|
||||
let mut rng = rand_chacha::ChaChaRng::from_os_rng();
|
||||
rng.fill_bytes(&mut token);
|
||||
|
||||
let token = APIToken::from_bytes(token.to_vec());
|
||||
info!("API token was generated successfully.");
|
||||
|
||||
token
|
||||
});
|
||||
use rand_chacha::ChaChaRng;
|
||||
|
||||
/// The API token data structure used to authenticate requests.
|
||||
pub struct APIToken {
|
||||
@ -34,7 +18,7 @@ impl APIToken {
|
||||
}
|
||||
|
||||
/// Creates a new API token from a hexadecimal text.
|
||||
fn from_hex_text(hex_text: &str) -> Self {
|
||||
pub fn from_hex_text(hex_text: &str) -> Self {
|
||||
APIToken {
|
||||
hex_text: hex_text.to_string(),
|
||||
}
|
||||
@ -45,40 +29,14 @@ impl APIToken {
|
||||
}
|
||||
|
||||
/// Validates the received token against the valid token.
|
||||
fn validate(&self, received_token: &Self) -> bool {
|
||||
pub fn validate(&self, received_token: &Self) -> bool {
|
||||
received_token.to_hex_text() == self.to_hex_text()
|
||||
}
|
||||
}
|
||||
|
||||
/// The request outcome type used to handle API token requests.
|
||||
type RequestOutcome<R, T> = rocket::request::Outcome<R, T>;
|
||||
|
||||
/// The request outcome implementation for the API token.
|
||||
#[rocket::async_trait]
|
||||
impl<'r> FromRequest<'r> for APIToken {
|
||||
type Error = APITokenError;
|
||||
|
||||
/// Handles the API token requests.
|
||||
async fn from_request(request: &'r Request<'_>) -> RequestOutcome<Self, Self::Error> {
|
||||
let token = request.headers().get_one("token");
|
||||
match token {
|
||||
Some(token) => {
|
||||
let received_token = APIToken::from_hex_text(token);
|
||||
if API_TOKEN.validate(&received_token) {
|
||||
RequestOutcome::Success(received_token)
|
||||
} else {
|
||||
RequestOutcome::Error((Status::Unauthorized, APITokenError::Invalid))
|
||||
}
|
||||
}
|
||||
|
||||
None => RequestOutcome::Error((Status::Unauthorized, APITokenError::Missing)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The API token error types.
|
||||
#[derive(Debug)]
|
||||
pub enum APITokenError {
|
||||
Missing,
|
||||
Invalid,
|
||||
pub fn generate_api_token() -> APIToken {
|
||||
let mut token = [0u8; 32];
|
||||
let mut rng = ChaChaRng::from_os_rng();
|
||||
rng.fill_bytes(&mut token);
|
||||
APIToken::from_bytes(token.to_vec())
|
||||
}
|
||||
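To clarify the refactoring above (a sketch, not part of the diff): token generation is now a plain factory function, and `from_hex_text` and `validate` are public, so callers such as the Qdrant sidecar and the runtime API guard can share one round-trip pattern. The wrapper function below is illustrative only:

```rust
// Illustrative sketch, assuming the APIToken API shown above.
use crate::api_token::{generate_api_token, APIToken};

fn token_round_trip() {
    // Generate a fresh 32-byte token (backed by ChaChaRng::from_os_rng):
    let token = generate_api_token();

    // The hex form is what travels over the wire, e.g. as the Qdrant
    // api-key or in the `token` header of the runtime API:
    let wire_value = token.to_hex_text();

    // The receiving side parses the header value back and validates it
    // against the expected token:
    let received = APIToken::from_hex_text(&wire_value);
    assert!(token.validate(&received));
}
```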
@ -10,15 +10,18 @@ use rocket::serde::Serialize;
|
||||
use serde::Deserialize;
|
||||
use strum_macros::Display;
|
||||
use tauri::updater::UpdateResponse;
|
||||
use tauri::{FileDropEvent, GlobalShortcutManager, UpdaterEvent, RunEvent, Manager, PathResolver, Window, WindowEvent};
|
||||
use tauri::{FileDropEvent, GlobalShortcutManager, UpdaterEvent, RunEvent, Manager, PathResolver, Window, WindowEvent, generate_context};
|
||||
use tauri::api::dialog::blocking::FileDialogBuilder;
|
||||
use tokio::sync::broadcast;
|
||||
use tokio::time;
|
||||
use crate::api_token::APIToken;
|
||||
use crate::dotnet::stop_dotnet_server;
|
||||
use crate::dotnet::{cleanup_dotnet_server, start_dotnet_server, stop_dotnet_server};
|
||||
use crate::environment::{is_prod, is_dev, CONFIG_DIRECTORY, DATA_DIRECTORY};
|
||||
use crate::log::switch_to_file_logging;
|
||||
use crate::pdfium::PDFIUM_LIB_PATH;
|
||||
use crate::qdrant::{cleanup_qdrant, start_qdrant_server, stop_qdrant_server};
|
||||
#[cfg(debug_assertions)]
|
||||
use crate::dotnet::create_startup_env_file;
|
||||
|
||||
/// The Tauri main window.
|
||||
static MAIN_WINDOW: Lazy<Mutex<Option<Window>>> = Lazy::new(|| Mutex::new(None));
|
||||
@ -101,16 +104,28 @@ pub fn start_tauri() {
|
||||
let data_path = data_path.join("data");
|
||||
|
||||
// Get and store the data and config directories:
|
||||
DATA_DIRECTORY.set(data_path.to_str().unwrap().to_string()).map_err(|_| error!("Was not abe to set the data directory.")).unwrap();
|
||||
DATA_DIRECTORY.set(data_path.to_str().unwrap().to_string()).map_err(|_| error!("Was not able to set the data directory.")).unwrap();
|
||||
CONFIG_DIRECTORY.set(app.path_resolver().app_config_dir().unwrap().to_str().unwrap().to_string()).map_err(|_| error!("Was not able to set the config directory.")).unwrap();
|
||||
|
||||
cleanup_qdrant();
|
||||
cleanup_dotnet_server();
|
||||
|
||||
if is_dev() {
|
||||
#[cfg(debug_assertions)]
|
||||
create_startup_env_file();
|
||||
} else {
|
||||
start_dotnet_server();
|
||||
}
|
||||
start_qdrant_server();
|
||||
|
||||
info!(Source = "Bootloader Tauri"; "Reconfigure the file logger to use the app data directory {data_path:?}");
|
||||
switch_to_file_logging(data_path).map_err(|e| error!("Failed to switch logging to file: {e}")).unwrap();
|
||||
set_pdfium_path(app.path_resolver());
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.plugin(tauri_plugin_window_state::Builder::default().build())
|
||||
.build(tauri::generate_context!())
|
||||
.build(generate_context!())
|
||||
.expect("Error while running Tauri application");
|
||||
|
||||
// The app event handler:
|
||||
@ -155,6 +170,7 @@ pub fn start_tauri() {
|
||||
|
||||
if is_prod() {
|
||||
stop_dotnet_server();
|
||||
stop_qdrant_server();
|
||||
} else {
|
||||
warn!(Source = "Tauri"; "Development environment detected; do not stop the .NET server.");
|
||||
}
|
||||
@ -183,6 +199,11 @@ pub fn start_tauri() {
|
||||
|
||||
RunEvent::ExitRequested { .. } => {
|
||||
warn!(Source = "Tauri"; "Run event: exit was requested.");
|
||||
stop_qdrant_server();
|
||||
if is_prod() {
|
||||
warn!("Try to stop the .NET server as well...");
|
||||
stop_dotnet_server();
|
||||
}
|
||||
}
|
||||
|
||||
RunEvent::Ready => {
|
||||
@ -194,10 +215,6 @@ pub fn start_tauri() {
|
||||
});
|
||||
|
||||
warn!(Source = "Tauri"; "Tauri app was stopped.");
|
||||
if is_prod() {
|
||||
warn!("Try to stop the .NET server as well...");
|
||||
stop_dotnet_server();
|
||||
}
|
||||
}
|
||||
|
||||
/// Our event API endpoint for Tauri events. We try to send an endless stream of events to the client.
|
||||
|
||||
@ -1,38 +0,0 @@
|
||||
use std::sync::OnceLock;
|
||||
use log::info;
|
||||
use rcgen::generate_simple_self_signed;
|
||||
use sha2::{Sha256, Digest};
|
||||
|
||||
/// The certificate used for the runtime API server.
|
||||
pub static CERTIFICATE: OnceLock<Vec<u8>> = OnceLock::new();
|
||||
|
||||
/// The private key used for the certificate of the runtime API server.
|
||||
pub static CERTIFICATE_PRIVATE_KEY: OnceLock<Vec<u8>> = OnceLock::new();
|
||||
|
||||
/// The fingerprint of the certificate used for the runtime API server.
|
||||
pub static CERTIFICATE_FINGERPRINT: OnceLock<String> = OnceLock::new();
|
||||
|
||||
/// Generates a TLS certificate for the runtime API server.
|
||||
pub fn generate_certificate() {
|
||||
|
||||
info!("Try to generate a TLS certificate for the runtime API server...");
|
||||
|
||||
let subject_alt_names = vec!["localhost".to_string()];
|
||||
let certificate_data = generate_simple_self_signed(subject_alt_names).unwrap();
|
||||
let certificate_binary_data = certificate_data.cert.der().to_vec();
|
||||
|
||||
let certificate_fingerprint = Sha256::digest(certificate_binary_data).to_vec();
|
||||
let certificate_fingerprint = certificate_fingerprint.iter().fold(String::new(), |mut result, byte| {
|
||||
result.push_str(&format!("{:02x}", byte));
|
||||
result
|
||||
});
|
||||
|
||||
let certificate_fingerprint = certificate_fingerprint.to_uppercase();
|
||||
|
||||
CERTIFICATE_FINGERPRINT.set(certificate_fingerprint.clone()).expect("Could not set the certificate fingerprint.");
|
||||
CERTIFICATE.set(certificate_data.cert.pem().as_bytes().to_vec()).expect("Could not set the certificate.");
|
||||
CERTIFICATE_PRIVATE_KEY.set(certificate_data.signing_key.serialize_pem().as_bytes().to_vec()).expect("Could not set the private key.");
|
||||
|
||||
info!("Certificate fingerprint: '{certificate_fingerprint}'.");
|
||||
info!("Done generating certificate for the runtime API server.");
|
||||
}
|
||||
32
runtime/src/certificate_factory.rs
Normal file
@ -0,0 +1,32 @@
|
||||
use log::info;
|
||||
use rcgen::generate_simple_self_signed;
|
||||
use sha2::{Sha256, Digest};
|
||||
|
||||
pub struct Certificate {
|
||||
pub certificate: Vec<u8>,
|
||||
pub private_key: Vec<u8>,
|
||||
pub fingerprint: String,
|
||||
}
|
||||
|
||||
pub fn generate_certificate() -> Certificate {
|
||||
|
||||
let subject_alt_names = vec!["localhost".to_string()];
|
||||
let certificate_data = generate_simple_self_signed(subject_alt_names).unwrap();
|
||||
let certificate_binary_data = certificate_data.cert.der().to_vec();
|
||||
|
||||
let certificate_fingerprint = Sha256::digest(certificate_binary_data).to_vec();
|
||||
let certificate_fingerprint = certificate_fingerprint.iter().fold(String::new(), |mut result, byte| {
|
||||
result.push_str(&format!("{:02x}", byte));
|
||||
result
|
||||
});
|
||||
|
||||
let certificate_fingerprint = certificate_fingerprint.to_uppercase();
|
||||
|
||||
info!("Certificate fingerprint: '{certificate_fingerprint}'.");
|
||||
|
||||
Certificate {
|
||||
certificate: certificate_data.cert.pem().as_bytes().to_vec(),
|
||||
private_key: certificate_data.signing_key.serialize_pem().as_bytes().to_vec(),
|
||||
fingerprint: certificate_fingerprint.clone()
|
||||
}
|
||||
}
|
||||
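For orientation (a sketch, not part of the diff): the factory now simply returns the certificate material, and the caller decides where it lives. This is what `generate_runtime_certificate` (in-memory statics) and `create_temp_tls_files` in `qdrant.rs` (PEM files in a temp directory) do; a minimal file-based consumer looks roughly like this, with the target directory as an assumed parameter:

```rust
// Sketch only: persist the generated PEM pair and hand back the
// fingerprint so clients can pin the self-signed certificate.
use std::{fs, io, path::Path};
use crate::certificate_factory::generate_certificate;

fn write_cert_pair(dir: &Path) -> io::Result<String> {
    let cert = generate_certificate();
    fs::write(dir.join("cert.pem"), &cert.certificate)?;
    fs::write(dir.join("key.pem"), &cert.private_key)?;
    Ok(cert.fingerprint)
}
```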
@ -1,4 +1,5 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use base64::Engine;
|
||||
use base64::prelude::BASE64_STANDARD;
|
||||
@ -7,13 +8,16 @@ use once_cell::sync::Lazy;
|
||||
use rocket::get;
|
||||
use tauri::api::process::{Command, CommandChild, CommandEvent};
|
||||
use tauri::Url;
|
||||
use crate::api_token::{APIToken, API_TOKEN};
|
||||
use crate::api_token::APIToken;
|
||||
use crate::runtime_api_token::API_TOKEN;
|
||||
use crate::app_window::change_location_to;
|
||||
use crate::certificate::CERTIFICATE_FINGERPRINT;
|
||||
use crate::runtime_certificate::CERTIFICATE_FINGERPRINT;
|
||||
use crate::encryption::ENCRYPTION;
|
||||
use crate::environment::is_dev;
|
||||
use crate::environment::{is_dev, DATA_DIRECTORY};
|
||||
use crate::network::get_available_port;
|
||||
use crate::runtime_api::API_SERVER_PORT;
|
||||
use crate::stale_process_cleanup::{kill_stale_process, log_potential_stale_process};
|
||||
use crate::sidecar_types::SidecarType;
|
||||
|
||||
// The .NET server is started in a separate process and communicates with this
|
||||
// runtime process via IPC. However, we do not start the .NET server in
|
||||
@ -26,6 +30,9 @@ static DOTNET_SERVER_PORT: Lazy<u16> = Lazy::new(|| get_available_port().unwrap(
|
||||
|
||||
static DOTNET_INITIALIZED: Lazy<Mutex<bool>> = Lazy::new(|| Mutex::new(false));
|
||||
|
||||
pub const PID_FILE_NAME: &str = "mindwork_ai_studio.pid";
|
||||
const SIDECAR_TYPE:SidecarType = SidecarType::Dotnet;
|
||||
|
||||
/// Returns the desired port of the .NET server. Our .NET app calls this endpoint to get
|
||||
/// the port where the .NET server should listen to.
|
||||
#[get("/system/dotnet/port")]
|
||||
@ -93,9 +100,9 @@ pub fn start_dotnet_server() {
|
||||
.envs(dotnet_server_environment)
|
||||
.spawn()
|
||||
.expect("Failed to spawn .NET server process.");
|
||||
|
||||
let server_pid = child.pid();
|
||||
info!(Source = "Bootloader .NET"; "The .NET server process started with PID={server_pid}.");
|
||||
log_potential_stale_process(Path::new(DATA_DIRECTORY.get().unwrap()).join(PID_FILE_NAME), server_pid, SIDECAR_TYPE);
|
||||
|
||||
// Save the server process to stop it later:
|
||||
*server_spawn_clone.lock().unwrap() = Some(child);
|
||||
@ -108,6 +115,7 @@ pub fn start_dotnet_server() {
|
||||
info!(Source = ".NET Server (stdout)"; "{line}");
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
/// This endpoint is called by the .NET server to signal that the server is ready.
|
||||
@ -152,4 +160,14 @@ pub fn stop_dotnet_server() {
|
||||
} else {
|
||||
warn!("The .NET server process was not started or is already stopped.");
|
||||
}
|
||||
info!("Start dotnet server cleanup");
|
||||
cleanup_dotnet_server();
|
||||
}
|
||||
|
||||
/// Remove old Pid files and kill the corresponding processes
|
||||
pub fn cleanup_dotnet_server() {
|
||||
let pid_path = Path::new(DATA_DIRECTORY.get().unwrap()).join(PID_FILE_NAME);
|
||||
if let Err(e) = kill_stale_process(pid_path, SIDECAR_TYPE) {
|
||||
warn!(Source = ".NET"; "Error during the cleanup of .NET: {}", e);
|
||||
}
|
||||
}
|
||||
@ -8,8 +8,13 @@ pub mod app_window;
|
||||
pub mod secret;
|
||||
pub mod clipboard;
|
||||
pub mod runtime_api;
|
||||
pub mod certificate;
|
||||
pub mod runtime_certificate;
|
||||
pub mod file_data;
|
||||
pub mod metadata;
|
||||
pub mod pdfium;
|
||||
pub mod pandoc;
|
||||
pub mod qdrant;
|
||||
pub mod certificate_factory;
|
||||
pub mod runtime_api_token;
|
||||
pub mod stale_process_cleanup;
|
||||
mod sidecar_types;
|
||||
@ -6,15 +6,12 @@ extern crate core;
|
||||
|
||||
use log::{info, warn};
|
||||
use mindwork_ai_studio::app_window::start_tauri;
|
||||
use mindwork_ai_studio::certificate::{generate_certificate};
|
||||
use mindwork_ai_studio::dotnet::start_dotnet_server;
|
||||
use mindwork_ai_studio::runtime_certificate::{generate_runtime_certificate};
|
||||
use mindwork_ai_studio::environment::is_dev;
|
||||
use mindwork_ai_studio::log::init_logging;
|
||||
use mindwork_ai_studio::metadata::MetaData;
|
||||
use mindwork_ai_studio::runtime_api::start_runtime_api;
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
use mindwork_ai_studio::dotnet::create_startup_env_file;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
@ -38,6 +35,7 @@ async fn main() {
|
||||
info!(".. MudBlazor: v{mud_blazor_version}", mud_blazor_version = metadata.mud_blazor_version);
|
||||
info!(".. Tauri: v{tauri_version}", tauri_version = metadata.tauri_version);
|
||||
info!(".. PDFium: v{pdfium_version}", pdfium_version = metadata.pdfium_version);
|
||||
info!(".. Qdrant: v{qdrant_version}", qdrant_version = metadata.qdrant_version);
|
||||
|
||||
if is_dev() {
|
||||
warn!("Running in development mode.");
|
||||
@ -45,15 +43,8 @@ async fn main() {
|
||||
info!("Running in production mode.");
|
||||
}
|
||||
|
||||
generate_certificate();
|
||||
generate_runtime_certificate();
|
||||
start_runtime_api();
|
||||
|
||||
if is_dev() {
|
||||
#[cfg(debug_assertions)]
|
||||
create_startup_env_file();
|
||||
} else {
|
||||
start_dotnet_server();
|
||||
}
|
||||
|
||||
start_tauri();
|
||||
}
|
||||
@ -16,6 +16,7 @@ pub struct MetaData {
|
||||
pub app_commit_hash: String,
|
||||
pub architecture: String,
|
||||
pub pdfium_version: String,
|
||||
pub qdrant_version: String,
|
||||
}
|
||||
|
||||
impl MetaData {
|
||||
@ -39,6 +40,7 @@ impl MetaData {
|
||||
let app_commit_hash = metadata_lines.next().unwrap();
|
||||
let architecture = metadata_lines.next().unwrap();
|
||||
let pdfium_version = metadata_lines.next().unwrap();
|
||||
let qdrant_version = metadata_lines.next().unwrap();
|
||||
|
||||
let metadata = MetaData {
|
||||
architecture: architecture.to_string(),
|
||||
@ -52,6 +54,7 @@ impl MetaData {
|
||||
rust_version: rust_version.to_string(),
|
||||
tauri_version: tauri_version.to_string(),
|
||||
pdfium_version: pdfium_version.to_string(),
|
||||
qdrant_version: qdrant_version.to_string(),
|
||||
};
|
||||
|
||||
*META_DATA.lock().unwrap() = Some(metadata.clone());
|
||||
|
||||
222
runtime/src/qdrant.rs
Normal file
@ -0,0 +1,222 @@
|
||||
use std::collections::HashMap;
|
||||
use std::{fs};
|
||||
use std::error::Error;
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
use std::sync::{Arc, Mutex, OnceLock};
|
||||
use log::{debug, error, info, warn};
|
||||
use once_cell::sync::Lazy;
|
||||
use rocket::get;
|
||||
use rocket::serde::json::Json;
|
||||
use rocket::serde::Serialize;
|
||||
use tauri::api::process::{Command, CommandChild, CommandEvent};
|
||||
use crate::api_token::{APIToken};
|
||||
use crate::environment::DATA_DIRECTORY;
|
||||
use crate::certificate_factory::generate_certificate;
|
||||
use std::path::PathBuf;
|
||||
use tempfile::{TempDir, Builder};
|
||||
use crate::stale_process_cleanup::{kill_stale_process, log_potential_stale_process};
|
||||
use crate::sidecar_types::SidecarType;
|
||||
|
||||
// Qdrant server process started in a separate process and can communicate
|
||||
// via HTTP or gRPC with the .NET server and the runtime process
|
||||
static QDRANT_SERVER: Lazy<Arc<Mutex<Option<CommandChild>>>> = Lazy::new(|| Arc::new(Mutex::new(None)));
|
||||
|
||||
// Qdrant server port (default is 6333 for HTTP and 6334 for gRPC)
|
||||
static QDRANT_SERVER_PORT_HTTP: Lazy<u16> = Lazy::new(|| {
|
||||
crate::network::get_available_port().unwrap_or(6333)
|
||||
});
|
||||
|
||||
static QDRANT_SERVER_PORT_GRPC: Lazy<u16> = Lazy::new(|| {
|
||||
crate::network::get_available_port().unwrap_or(6334)
|
||||
});
|
||||
|
||||
pub static CERTIFICATE_FINGERPRINT: OnceLock<String> = OnceLock::new();
|
||||
static API_TOKEN: Lazy<APIToken> = Lazy::new(|| {
|
||||
crate::api_token::generate_api_token()
|
||||
});
|
||||
|
||||
static TMPDIR: Lazy<Mutex<Option<TempDir>>> = Lazy::new(|| Mutex::new(None));
|
||||
|
||||
const PID_FILE_NAME: &str = "qdrant.pid";
|
||||
const SIDECAR_TYPE:SidecarType = SidecarType::Qdrant;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct ProvideQdrantInfo {
|
||||
path: String,
|
||||
port_http: u16,
|
||||
port_grpc: u16,
|
||||
fingerprint: String,
|
||||
api_token: String,
|
||||
}
|
||||
|
||||
#[get("/system/qdrant/info")]
|
||||
pub fn qdrant_port(_token: APIToken) -> Json<ProvideQdrantInfo> {
|
||||
Json(ProvideQdrantInfo {
|
||||
path: Path::new(DATA_DIRECTORY.get().unwrap()).join("databases").join("qdrant").to_str().unwrap().to_string(),
|
||||
port_http: *QDRANT_SERVER_PORT_HTTP,
|
||||
port_grpc: *QDRANT_SERVER_PORT_GRPC,
|
||||
fingerprint: CERTIFICATE_FINGERPRINT.get().expect("Certificate fingerprint not available").to_string(),
|
||||
api_token: API_TOKEN.to_hex_text().to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Starts the Qdrant server in a separate process.
|
||||
pub fn start_qdrant_server(){
|
||||
|
||||
let base_path = DATA_DIRECTORY.get().unwrap();
|
||||
let path = Path::new(base_path).join("databases").join("qdrant");
|
||||
if !path.exists() {
|
||||
if let Err(e) = fs::create_dir_all(&path){
|
||||
error!(Source="Qdrant"; "The required directory to host the Qdrant database could not be created: {}", e.to_string());
|
||||
};
|
||||
}
|
||||
let (cert_path, key_path) =create_temp_tls_files(&path).unwrap();
|
||||
|
||||
let storage_path = path.join("storage").to_str().unwrap().to_string();
|
||||
let snapshot_path = path.join("snapshots").to_str().unwrap().to_string();
|
||||
let init_path = path.join(".qdrant-initalized").to_str().unwrap().to_string();
|
||||
|
||||
let qdrant_server_environment = HashMap::from_iter([
|
||||
(String::from("QDRANT__SERVICE__HTTP_PORT"), QDRANT_SERVER_PORT_HTTP.to_string()),
|
||||
(String::from("QDRANT__SERVICE__GRPC_PORT"), QDRANT_SERVER_PORT_GRPC.to_string()),
|
||||
(String::from("QDRANT_INIT_FILE_PATH"), init_path),
|
||||
(String::from("QDRANT__STORAGE__STORAGE_PATH"), storage_path),
|
||||
(String::from("QDRANT__STORAGE__SNAPSHOTS_PATH"), snapshot_path),
|
||||
(String::from("QDRANT__TLS__CERT"), cert_path.to_str().unwrap().to_string()),
|
||||
(String::from("QDRANT__TLS__KEY"), key_path.to_str().unwrap().to_string()),
|
||||
(String::from("QDRANT__SERVICE__ENABLE_TLS"), "true".to_string()),
|
||||
(String::from("QDRANT__SERVICE__API_KEY"), API_TOKEN.to_hex_text().to_string()),
|
||||
]);
|
||||
|
||||
let server_spawn_clone = QDRANT_SERVER.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let (mut rx, child) = Command::new_sidecar("qdrant")
|
||||
.expect("Failed to create sidecar for Qdrant")
|
||||
.args(["--config-path", "resources/databases/qdrant/config.yaml"])
|
||||
.envs(qdrant_server_environment)
|
||||
.spawn()
|
||||
.expect("Failed to spawn Qdrant server process.");
|
||||
|
||||
let server_pid = child.pid();
|
||||
info!(Source = "Bootloader Qdrant"; "Qdrant server process started with PID={server_pid}.");
|
||||
log_potential_stale_process(path.join(PID_FILE_NAME), server_pid, SIDECAR_TYPE);
|
||||
|
||||
// Save the server process to stop it later:
|
||||
*server_spawn_clone.lock().unwrap() = Some(child);
|
||||
|
||||
// Log the output of the Qdrant server:
|
||||
while let Some(event) = rx.recv().await {
|
||||
match event {
|
||||
CommandEvent::Stdout(line) => {
|
||||
let line = line.trim_end();
|
||||
if line.contains("INFO") || line.contains("info") {
|
||||
info!(Source = "Qdrant Server"; "{line}");
|
||||
} else if line.contains("WARN") || line.contains("warning") {
|
||||
warn!(Source = "Qdrant Server"; "{line}");
|
||||
} else if line.contains("ERROR") || line.contains("error") {
|
||||
error!(Source = "Qdrant Server"; "{line}");
|
||||
} else {
|
||||
debug!(Source = "Qdrant Server"; "{line}");
|
||||
}
|
||||
},
|
||||
|
||||
CommandEvent::Stderr(line) => {
|
||||
error!(Source = "Qdrant Server (stderr)"; "{line}");
|
||||
},
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// Stops the Qdrant server process.
|
||||
pub fn stop_qdrant_server() {
|
||||
if let Some(server_process) = QDRANT_SERVER.lock().unwrap().take() {
|
||||
let server_kill_result = server_process.kill();
|
||||
match server_kill_result {
|
||||
Ok(_) => warn!(Source = "Qdrant"; "Qdrant server process was stopped."),
|
||||
Err(e) => error!(Source = "Qdrant"; "Failed to stop Qdrant server process: {e}."),
|
||||
}
|
||||
} else {
|
||||
warn!(Source = "Qdrant"; "Qdrant server process was not started or is already stopped.");
|
||||
}
|
||||
|
||||
drop_tmpdir();
|
||||
cleanup_qdrant();
|
||||
}
|
||||
|
||||
/// Create temporary directory with TLS relevant files
|
||||
pub fn create_temp_tls_files(path: &PathBuf) -> Result<(PathBuf, PathBuf), Box<dyn Error>> {
|
||||
let cert = generate_certificate();
|
||||
|
||||
let temp_dir = init_tmpdir_in(path);
|
||||
let cert_path = temp_dir.join("cert.pem");
|
||||
let key_path = temp_dir.join("key.pem");
|
||||
|
||||
let mut cert_file = File::create(&cert_path)?;
|
||||
cert_file.write_all(&*cert.certificate)?;
|
||||
|
||||
let mut key_file = File::create(&key_path)?;
|
||||
key_file.write_all(&*cert.private_key)?;
|
||||
|
||||
CERTIFICATE_FINGERPRINT.set(cert.fingerprint).expect("Could not set the certificate fingerprint.");
|
||||
|
||||
Ok((cert_path, key_path))
|
||||
}
|
||||
|
||||
pub fn init_tmpdir_in<P: AsRef<Path>>(path: P) -> PathBuf {
|
||||
let mut guard = TMPDIR.lock().unwrap();
|
||||
let dir = guard.get_or_insert_with(|| {
|
||||
Builder::new()
|
||||
.prefix("cert-")
|
||||
.tempdir_in(path)
|
||||
.expect("failed to create tempdir")
|
||||
});
|
||||
|
||||
dir.path().to_path_buf()
|
||||
}
|
||||
|
||||
pub fn drop_tmpdir() {
|
||||
let mut guard = TMPDIR.lock().unwrap();
|
||||
*guard = None;
|
||||
warn!(Source = "Qdrant"; "Temporary directory for TLS was dropped.");
|
||||
}
|
||||
|
||||
/// Remove old Pid files and kill the corresponding processes
|
||||
pub fn cleanup_qdrant() {
|
||||
let pid_path = Path::new(DATA_DIRECTORY.get().unwrap()).join("databases").join("qdrant").join(PID_FILE_NAME);
|
||||
if let Err(e) = kill_stale_process(pid_path, SIDECAR_TYPE) {
|
||||
warn!(Source = "Qdrant"; "Error during the cleanup of Qdrant: {}", e);
|
||||
}
|
||||
if let Err(e) = delete_old_certificates() {
|
||||
warn!(Source = "Qdrant"; "Error during the cleanup of Qdrant: {}", e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
pub fn delete_old_certificates() -> Result<(), Box<dyn Error>> {
|
||||
let dir_path = Path::new(DATA_DIRECTORY.get().unwrap()).join("databases").join("qdrant");
|
||||
|
||||
if !dir_path.exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
for entry in fs::read_dir(dir_path)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
if path.is_dir() {
|
||||
let file_name = entry.file_name();
|
||||
let folder_name = file_name.to_string_lossy();
|
||||
|
||||
if folder_name.starts_with("cert-") {
|
||||
fs::remove_dir_all(&path)?;
|
||||
warn!(Source="Qdrant"; "Removed old certificates in: {}", path.display());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@ -3,7 +3,7 @@ use once_cell::sync::Lazy;
|
||||
use rocket::config::Shutdown;
|
||||
use rocket::figment::Figment;
|
||||
use rocket::routes;
|
||||
use crate::certificate::{CERTIFICATE, CERTIFICATE_PRIVATE_KEY};
|
||||
use crate::runtime_certificate::{CERTIFICATE, CERTIFICATE_PRIVATE_KEY};
|
||||
use crate::environment::is_dev;
|
||||
use crate::network::get_available_port;
|
||||
|
||||
@ -67,6 +67,7 @@ pub fn start_runtime_api() {
|
||||
.mount("/", routes![
|
||||
crate::dotnet::dotnet_port,
|
||||
crate::dotnet::dotnet_ready,
|
||||
crate::qdrant::qdrant_port,
|
||||
crate::clipboard::set_clipboard,
|
||||
crate::app_window::get_event_stream,
|
||||
crate::app_window::check_for_update,
|
||||
|
||||
40
runtime/src/runtime_api_token.rs
Normal file
@ -0,0 +1,40 @@
|
||||
use once_cell::sync::Lazy;
|
||||
use rocket::http::Status;
|
||||
use rocket::Request;
|
||||
use rocket::request::FromRequest;
|
||||
use crate::api_token::{generate_api_token, APIToken};
|
||||
|
||||
pub static API_TOKEN: Lazy<APIToken> = Lazy::new(|| generate_api_token());
|
||||
|
||||
/// The request outcome type used to handle API token requests.
|
||||
type RequestOutcome<R, T> = rocket::request::Outcome<R, T>;
|
||||
|
||||
/// The request outcome implementation for the API token.
|
||||
#[rocket::async_trait]
|
||||
impl<'r> FromRequest<'r> for APIToken {
|
||||
type Error = APITokenError;
|
||||
|
||||
/// Handles the API token requests.
|
||||
async fn from_request(request: &'r Request<'_>) -> RequestOutcome<Self, Self::Error> {
|
||||
let token = request.headers().get_one("token");
|
||||
match token {
|
||||
Some(token) => {
|
||||
let received_token = APIToken::from_hex_text(token);
|
||||
if API_TOKEN.validate(&received_token) {
|
||||
RequestOutcome::Success(received_token)
|
||||
} else {
|
||||
RequestOutcome::Error((Status::Unauthorized, APITokenError::Invalid))
|
||||
}
|
||||
}
|
||||
|
||||
None => RequestOutcome::Error((Status::Unauthorized, APITokenError::Missing)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The API token error types.
|
||||
#[derive(Debug)]
|
||||
pub enum APITokenError {
|
||||
Missing,
|
||||
Invalid,
|
||||
}
|
||||
26
runtime/src/runtime_certificate.rs
Normal file
@ -0,0 +1,26 @@
|
||||
use std::sync::OnceLock;
|
||||
use log::info;
|
||||
use crate::certificate_factory::generate_certificate;
|
||||
|
||||
/// The certificate used for the runtime API server.
|
||||
pub static CERTIFICATE: OnceLock<Vec<u8>> = OnceLock::new();
|
||||
|
||||
/// The private key used for the certificate of the runtime API server.
|
||||
pub static CERTIFICATE_PRIVATE_KEY: OnceLock<Vec<u8>> = OnceLock::new();
|
||||
|
||||
/// The fingerprint of the certificate used for the runtime API server.
|
||||
pub static CERTIFICATE_FINGERPRINT: OnceLock<String> = OnceLock::new();
|
||||
|
||||
/// Generates a TLS certificate for the runtime API server.
|
||||
pub fn generate_runtime_certificate() {
|
||||
|
||||
info!("Try to generate a TLS certificate for the runtime API server...");
|
||||
|
||||
let cert = generate_certificate();
|
||||
|
||||
CERTIFICATE_FINGERPRINT.set(cert.fingerprint).expect("Could not set the certificate fingerprint.");
|
||||
CERTIFICATE.set(cert.certificate).expect("Could not set the certificate.");
|
||||
CERTIFICATE_PRIVATE_KEY.set(cert.private_key).expect("Could not set the private key.");
|
||||
|
||||
info!("Done generating certificate for the runtime API server.");
|
||||
}
|
||||
15
runtime/src/sidecar_types.rs
Normal file
@ -0,0 +1,15 @@
|
||||
use std::fmt;
|
||||
|
||||
pub enum SidecarType {
|
||||
Dotnet,
|
||||
Qdrant,
|
||||
}
|
||||
|
||||
impl fmt::Display for SidecarType {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
SidecarType::Dotnet => write!(f, ".Net"),
|
||||
SidecarType::Qdrant => write!(f, "Qdrant"),
|
||||
}
|
||||
}
|
||||
}
|
||||
89
runtime/src/stale_process_cleanup.rs
Normal file
@ -0,0 +1,89 @@
|
||||
use std::fs;
|
||||
use std::fs::File;
|
||||
use std::io::{Error, ErrorKind, Write};
|
||||
use std::path::{PathBuf};
|
||||
use log::{info, warn};
|
||||
use sysinfo::{Pid, ProcessesToUpdate, Signal, System};
|
||||
use crate::sidecar_types::SidecarType;
|
||||
|
||||
fn parse_pid_file(content: &str) -> Result<(u32, String), Error> {
|
||||
let mut lines = content
|
||||
.lines()
|
||||
.map(|line| line.trim())
|
||||
.filter(|line| !line.is_empty());
|
||||
let pid_str = lines
|
||||
.next()
|
||||
.ok_or_else(|| Error::new(ErrorKind::InvalidData, "Missing PID in file"))?;
|
||||
let pid: u32 = pid_str
|
||||
.parse()
|
||||
.map_err(|_| Error::new(ErrorKind::InvalidData, "Invalid PID in file"))?;
|
||||
let name = lines
|
||||
.next()
|
||||
.ok_or_else(|| Error::new(ErrorKind::InvalidData, "Missing process name in file"))?
|
||||
.to_string();
|
||||
Ok((pid, name))
|
||||
}
|
||||
|
||||
pub fn kill_stale_process(pid_file_path: PathBuf, sidecar_type: SidecarType) -> Result<(), Error> {
|
||||
if !pid_file_path.exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let pid_file_content = fs::read_to_string(&pid_file_path)?;
|
||||
let (pid, expected_name) = parse_pid_file(&pid_file_content)?;
|
||||
|
||||
let mut system = System::new_all();
|
||||
|
||||
let pid = Pid::from_u32(pid);
|
||||
system.refresh_processes(ProcessesToUpdate::Some(&[pid]), true);
|
||||
if let Some(process) = system.process(pid){
|
||||
let name = process.name().to_string_lossy();
|
||||
if name != expected_name {
|
||||
return Err(Error::new(
|
||||
ErrorKind::InvalidInput,
|
||||
format!(
|
||||
"Process name does not match: expected '{}' but found '{}'",
|
||||
expected_name, name
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
let killed = process.kill_with(Signal::Kill).unwrap_or_else(|| process.kill());
|
||||
if !killed {
|
||||
return Err(Error::new(ErrorKind::Other, "Failed to kill process"));
|
||||
}
|
||||
info!(Source="Stale Process Cleanup";"{}: Killed process: \"{}\"", sidecar_type,pid_file_path.display());
|
||||
} else {
|
||||
info!(Source="Stale Process Cleanup";"{}: Pid file with process number '{}' was found, but process was not.", sidecar_type, pid);
|
||||
};
|
||||
|
||||
fs::remove_file(&pid_file_path)?;
|
||||
info!(Source="Stale Process Cleanup";"{}: Deleted redundant Pid file: \"{}\"", sidecar_type,pid_file_path.display());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn log_potential_stale_process(pid_file_path: PathBuf, pid: u32, sidecar_type: SidecarType) {
|
||||
let mut system = System::new_all();
|
||||
let pid = Pid::from_u32(pid);
|
||||
system.refresh_processes(ProcessesToUpdate::Some(&[pid]), true);
|
||||
let Some(process) = system.process(pid) else {
|
||||
warn!(Source="Stale Process Cleanup";
|
||||
"{}: Pid file with process number '{}' was not created because the process was not found.",
|
||||
sidecar_type, pid
|
||||
);
|
||||
return;
|
||||
};
|
||||
|
||||
match File::create(&pid_file_path) {
|
||||
Ok(mut file) => {
|
||||
let name = process.name().to_string_lossy();
|
||||
let content = format!("{pid}\n{name}\n");
|
||||
if let Err(e) = file.write_all(content.as_bytes()) {
|
||||
warn!(Source="Stale Process Cleanup";"{}: Failed to write to \"{}\": {}", sidecar_type,pid_file_path.display(), e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
warn!(Source="Stale Process Cleanup";"{}: Failed to create \"{}\": {}", sidecar_type, pid_file_path.display(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
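A small test sketch (not part of the diff) to document the PID-file format this module relies on: `log_potential_stale_process` writes the PID on the first line and the process name on the second, which is exactly what `parse_pid_file` expects back:

```rust
// Illustrative test for the "PID\nprocess-name\n" file format.
#[cfg(test)]
mod tests {
    use super::parse_pid_file;

    #[test]
    fn parses_pid_and_process_name() {
        let content = "4242\nqdrant\n";
        let (pid, name) = parse_pid_file(content).expect("valid PID file");
        assert_eq!(pid, 4242);
        assert_eq!(name, "qdrant");
    }
}
```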
@ -20,6 +20,11 @@
|
||||
"name": "../app/MindWork AI Studio/bin/dist/mindworkAIStudioServer",
|
||||
"sidecar": true,
|
||||
"args": true
|
||||
},
|
||||
{
|
||||
"name": "target/databases/qdrant/qdrant",
|
||||
"sidecar": true,
|
||||
"args": true
|
||||
}
|
||||
]
|
||||
},
|
||||
@ -59,7 +64,8 @@
|
||||
"targets": "all",
|
||||
"identifier": "com.github.mindwork-ai.ai-studio",
|
||||
"externalBin": [
|
||||
"../app/MindWork AI Studio/bin/dist/mindworkAIStudioServer"
|
||||
"../app/MindWork AI Studio/bin/dist/mindworkAIStudioServer",
|
||||
"target/databases/qdrant/qdrant"
|
||||
],
|
||||
"resources": [
|
||||
"resources/*"
|
||||
|
||||