Added support for citations to the chat interface

This commit is contained in:
Thorsten Sommer 2025-08-31 14:14:19 +02:00
parent 8e854469a9
commit da513d423b
Signed by: tsommer
GPG Key ID: 371BBA77A02C0108
10 changed files with 68 additions and 4 deletions

View File

@ -1333,6 +1333,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1603883875"] = "Yes, re
-- Yes, remove it -- Yes, remove it
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1820166585"] = "Yes, remove it" UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1820166585"] = "Yes, remove it"
-- Number of sources
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1848978959"] = "Number of sources"
-- Do you really want to edit this message? In order to edit this message, the AI response will be deleted. -- Do you really want to edit this message? In order to edit this message, the AI response will be deleted.
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2018431076"] = "Do you really want to edit this message? In order to edit this message, the AI response will be deleted." UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2018431076"] = "Do you really want to edit this message? In order to edit this message, the AI response will be deleted."
@ -4861,6 +4864,9 @@ UI_TEXT_CONTENT["AISTUDIO::PROVIDER::LLMPROVIDERSEXTENSIONS::T3424652889"] = "Un
-- no model selected -- no model selected
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODEL::T2234274832"] = "no model selected" UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODEL::T2234274832"] = "no model selected"
-- Sources
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::SOURCEEXTENSIONS::T2730980305"] = "Sources"
-- Use no chat template -- Use no chat template
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::CHATTEMPLATE::T4258819635"] = "Use no chat template" UI_TEXT_CONTENT["AISTUDIO::SETTINGS::CHATTEMPLATE::T4258819635"] = "Use no chat template"

View File

@ -1,6 +1,7 @@
@using AIStudio.Tools @using AIStudio.Tools
@using MudBlazor @using MudBlazor
@using AIStudio.Components @using AIStudio.Components
@using AIStudio.Provider
@inherits AIStudio.Components.MSGComponentBase @inherits AIStudio.Components.MSGComponentBase
<MudCard Class="@this.CardClasses" Outlined="@true"> <MudCard Class="@this.CardClasses" Outlined="@true">
<MudCardHeader> <MudCardHeader>
@ -15,6 +16,14 @@
</MudText> </MudText>
</CardHeaderContent> </CardHeaderContent>
<CardHeaderActions> <CardHeaderActions>
@if (this.Content.Sources.Count > 0)
{
<MudTooltip Text="@T("Number of sources")" Placement="Placement.Bottom">
<MudBadge Content="@this.Content.Sources.Count" Color="Color.Primary" Overlap="true" BadgeClass="sources-card-header">
<MudIconButton Icon="@Icons.Material.Filled.Link" />
</MudBadge>
</MudTooltip>
}
@if (this.IsSecondToLastBlock && this.Role is ChatRole.USER && this.EditLastUserBlockFunc is not null) @if (this.IsSecondToLastBlock && this.Role is ChatRole.USER && this.EditLastUserBlockFunc is not null)
{ {
<MudTooltip Text="@T("Edit")" Placement="Placement.Bottom"> <MudTooltip Text="@T("Edit")" Placement="Placement.Bottom">
@ -72,6 +81,10 @@
else else
{ {
<MudMarkdown Value="@textContent.Text.RemoveThinkTags().Trim()" Props="Markdown.DefaultConfig" Styling="@this.MarkdownStyling" /> <MudMarkdown Value="@textContent.Text.RemoveThinkTags().Trim()" Props="Markdown.DefaultConfig" Styling="@this.MarkdownStyling" />
@if (textContent.Sources.Count > 0)
{
<MudMarkdown Value="@textContent.Sources.ToMarkdown()" Props="Markdown.DefaultConfig" Styling="@this.MarkdownStyling" />
}
} }
} }
} }

View File

@ -27,6 +27,9 @@ public sealed class ContentImage : IContent, IImageSource
[JsonIgnore] [JsonIgnore]
public Func<Task> StreamingEvent { get; set; } = () => Task.CompletedTask; public Func<Task> StreamingEvent { get; set; } = () => Task.CompletedTask;
/// <inheritdoc />
public List<Source> Sources { get; set; } = [];
/// <inheritdoc /> /// <inheritdoc />
public Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatChatThread, CancellationToken token = default) public Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatChatThread, CancellationToken token = default)
{ {

View File

@ -24,7 +24,7 @@ public sealed class ContentText : IContent
public bool InitialRemoteWait { get; set; } public bool InitialRemoteWait { get; set; }
/// <inheritdoc /> /// <inheritdoc />
// [JsonIgnore] [JsonIgnore]
public bool IsStreaming { get; set; } public bool IsStreaming { get; set; }
/// <inheritdoc /> /// <inheritdoc />
@ -35,6 +35,9 @@ public sealed class ContentText : IContent
[JsonIgnore] [JsonIgnore]
public Func<Task> StreamingEvent { get; set; } = () => Task.CompletedTask; public Func<Task> StreamingEvent { get; set; } = () => Task.CompletedTask;
/// <inheritdoc />
public List<Source> Sources { get; set; } = [];
/// <inheritdoc /> /// <inheritdoc />
public async Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatThread, CancellationToken token = default) public async Task<ChatThread> CreateFromProviderAsync(IProvider provider, Model chatModel, IContent? lastPrompt, ChatThread? chatThread, CancellationToken token = default)
{ {
@ -80,7 +83,7 @@ public sealed class ContentText : IContent
this.InitialRemoteWait = true; this.InitialRemoteWait = true;
// Iterate over the responses from the AI: // Iterate over the responses from the AI:
await foreach (var deltaText in provider.StreamChatCompletion(chatModel, chatThread, settings, token)) await foreach (var contentStreamChunk in provider.StreamChatCompletion(chatModel, chatThread, settings, token))
{ {
// When the user cancels the request, we stop the loop: // When the user cancels the request, we stop the loop:
if (token.IsCancellationRequested) if (token.IsCancellationRequested)
@ -91,7 +94,10 @@ public sealed class ContentText : IContent
this.IsStreaming = true; this.IsStreaming = true;
// Add the response to the text: // Add the response to the text:
this.Text += deltaText; this.Text += contentStreamChunk;
// Merge the sources:
this.Sources.MergeSources(contentStreamChunk.Sources);
// Notify the UI that the content has changed, // Notify the UI that the content has changed,
// depending on the energy saving mode: // depending on the energy saving mode:

View File

@ -37,6 +37,12 @@ public interface IContent
/// </summary> /// </summary>
[JsonIgnore] [JsonIgnore]
public Func<Task> StreamingDone { get; set; } public Func<Task> StreamingDone { get; set; }
/// <summary>
/// The provided sources, if any.
/// </summary>
[JsonIgnore]
public List<Source> Sources { get; set; }
/// <summary> /// <summary>
/// Uses the provider to create the content. /// Uses the provider to create the content.

View File

@ -1335,6 +1335,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1603883875"] = "Ja, neu
-- Yes, remove it -- Yes, remove it
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1820166585"] = "Ja, entferne es" UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1820166585"] = "Ja, entferne es"
-- Number of sources
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1848978959"] = "Anzahl der Quellen"
-- Do you really want to edit this message? In order to edit this message, the AI response will be deleted. -- Do you really want to edit this message? In order to edit this message, the AI response will be deleted.
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2018431076"] = "Möchten Sie diese Nachricht wirklich bearbeiten? Um die Nachricht zu bearbeiten, wird die Antwort der KI gelöscht." UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2018431076"] = "Möchten Sie diese Nachricht wirklich bearbeiten? Um die Nachricht zu bearbeiten, wird die Antwort der KI gelöscht."
@ -4863,6 +4866,9 @@ UI_TEXT_CONTENT["AISTUDIO::PROVIDER::LLMPROVIDERSEXTENSIONS::T3424652889"] = "Un
-- no model selected -- no model selected
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODEL::T2234274832"] = "Kein Modell ausgewählt" UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODEL::T2234274832"] = "Kein Modell ausgewählt"
-- Sources
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::SOURCEEXTENSIONS::T2730980305"] = "Quellen"
-- Use no chat template -- Use no chat template
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::CHATTEMPLATE::T4258819635"] = "Keine Chat-Vorlage verwenden" UI_TEXT_CONTENT["AISTUDIO::SETTINGS::CHATTEMPLATE::T4258819635"] = "Keine Chat-Vorlage verwenden"

View File

@ -1335,6 +1335,9 @@ UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1603883875"] = "Yes, re
-- Yes, remove it -- Yes, remove it
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1820166585"] = "Yes, remove it" UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1820166585"] = "Yes, remove it"
-- Number of sources
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T1848978959"] = "Number of sources"
-- Do you really want to edit this message? In order to edit this message, the AI response will be deleted. -- Do you really want to edit this message? In order to edit this message, the AI response will be deleted.
UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2018431076"] = "Do you really want to edit this message? In order to edit this message, the AI response will be deleted." UI_TEXT_CONTENT["AISTUDIO::CHAT::CONTENTBLOCKCOMPONENT::T2018431076"] = "Do you really want to edit this message? In order to edit this message, the AI response will be deleted."
@ -4863,6 +4866,9 @@ UI_TEXT_CONTENT["AISTUDIO::PROVIDER::LLMPROVIDERSEXTENSIONS::T3424652889"] = "Un
-- no model selected -- no model selected
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODEL::T2234274832"] = "no model selected" UI_TEXT_CONTENT["AISTUDIO::PROVIDER::MODEL::T2234274832"] = "no model selected"
-- Sources
UI_TEXT_CONTENT["AISTUDIO::PROVIDER::SOURCEEXTENSIONS::T2730980305"] = "Sources"
-- Use no chat template -- Use no chat template
UI_TEXT_CONTENT["AISTUDIO::SETTINGS::CHATTEMPLATE::T4258819635"] = "Use no chat template" UI_TEXT_CONTENT["AISTUDIO::SETTINGS::CHATTEMPLATE::T4258819635"] = "Use no chat template"

View File

@ -32,4 +32,16 @@ public static class SourceExtensions
return sb.ToString(); return sb.ToString();
} }
/// <summary>
/// Merges the given added sources into an existing source list, skipping duplicates.
/// </summary>
/// <remarks>
/// A candidate counts as a duplicate when any existing entry already has the same URL
/// or the same Title. NOTE(review): the hard cast assumes every ISource instance is a
/// Source — confirm no other ISource implementations exist.
/// </remarks>
/// <param name="sources">The existing list of sources to merge into.</param>
/// <param name="addedSources">The list of sources to add.</param>
public static void MergeSources(this IList<Source> sources, IList<ISource> addedSources)
{
    foreach (var candidate in addedSources)
    {
        // De Morgan of the duplicate test: duplicate iff some existing entry
        // shares the URL or the Title with the candidate.
        var isDuplicate = sources.Any(existing => existing.URL == candidate.URL || existing.Title == candidate.Title);
        if (!isDuplicate)
            sources.Add((Source)candidate);
    }
}
} }

View File

@ -140,4 +140,9 @@
.no-elevation { .no-elevation {
box-shadow: none !important; box-shadow: none !important;
}
.sources-card-header {
top: 0em !important;
left: 2.2em !important;
} }

View File

@ -2,7 +2,8 @@
- Added support for predefined chat templates in configuration plugins to help enterprises roll out consistent templates across the organization. - Added support for predefined chat templates in configuration plugins to help enterprises roll out consistent templates across the organization.
- Added the ability to choose between automatic and manual update installation to the app settings (default is manual). - Added the ability to choose between automatic and manual update installation to the app settings (default is manual).
- Added the ability to control the update installation behavior by configuration plugins. - Added the ability to control the update installation behavior by configuration plugins.
- Added the option for LLM providers to return citations. - Added the option for LLM providers to stream citations or sources.
- Added support for citations to the chat interface. This feature is invisible unless an LLM model is streaming citations or sources.
- Improved memory usage in several areas of the app. - Improved memory usage in several areas of the app.
- Improved plugin management for configuration plugins so that hot reload detects when a provider or chat template has been removed. - Improved plugin management for configuration plugins so that hot reload detects when a provider or chat template has been removed.
- Improved the dialog for naming chats and workspaces to ensure valid inputs are entered. - Improved the dialog for naming chats and workspaces to ensure valid inputs are entered.