Mirror of https://github.com/MindWorkAI/AI-Studio.git (synced 2025-10-08 19:40:21 +00:00)
Added support for citations

Commit: fc3085762d
Parent: 5574616160
@@ -21,7 +21,7 @@ public sealed class ProviderAlibabaCloud(ILogger logger) : BaseProvider("https:/
     public override string InstanceName { get; set; } = "AlibabaCloud";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -18,7 +18,7 @@ public sealed class ProviderAnthropic(ILogger logger) : BaseProvider("https://ap
     public override string InstanceName { get; set; } = "Anthropic";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -13,7 +13,7 @@ public readonly record struct ResponseStreamLine(string Type, int Index, Delta D
     public bool ContainsContent() => this != default && !string.IsNullOrWhiteSpace(this.Delta.Text);
 
     /// <inheritdoc />
-    public string GetContent() => this.Delta.Text;
+    public ContentStreamChunk GetContent() => new(this.Delta.Text, []);
 }
 
 /// <summary>

@@ -63,7 +63,7 @@ public abstract class BaseProvider : IProvider, ISecretId
     public abstract string InstanceName { get; set; }
 
     /// <inheritdoc />
-    public abstract IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, CancellationToken token = default);
+    public abstract IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, CancellationToken token = default);
 
     /// <inheritdoc />
     public abstract IAsyncEnumerable<ImageURL> StreamImageCompletion(Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, CancellationToken token = default);

@@ -96,7 +96,7 @@ public abstract class BaseProvider : IProvider, ISecretId
     /// <param name="requestBuilder">A function that builds the request.</param>
     /// <param name="token">The cancellation token.</param>
     /// <returns>The status object of the request.</returns>
-    protected async Task<HttpRateLimitedStreamResult> SendRequest(Func<Task<HttpRequestMessage>> requestBuilder, CancellationToken token = default)
+    private async Task<HttpRateLimitedStreamResult> SendRequest(Func<Task<HttpRequestMessage>> requestBuilder, CancellationToken token = default)
     {
         const int MAX_RETRIES = 6;
         const double RETRY_DELAY_SECONDS = 4;

@@ -189,7 +189,7 @@ public abstract class BaseProvider : IProvider, ISecretId
         return new HttpRateLimitedStreamResult(true, false, string.Empty, response);
     }
 
-    protected async IAsyncEnumerable<string> StreamChatCompletionInternal<T>(string providerName, Func<Task<HttpRequestMessage>> requestBuilder, [EnumeratorCancellation] CancellationToken token = default) where T : struct, IResponseStreamLine
+    protected async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletionInternal<T>(string providerName, Func<Task<HttpRequestMessage>> requestBuilder, [EnumeratorCancellation] CancellationToken token = default) where T : struct, IResponseStreamLine
     {
         StreamReader? streamReader = null;
         try

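Since StreamChatCompletionInternal<T> now yields ContentStreamChunk instead of plain strings, the shared streaming loop can forward whatever sources a typed response line exposes. A rough C# sketch of that idea, assuming a variable `line` of type T parsed from one SSE event inside the existing read loop (this is not the literal method body from the commit):

// Sketch only: one plausible shape of the per-line yield, where `line` is a deserialized T.
if (line.ContainsContent())
{
    var content = line.GetContent();   // already a ContentStreamChunk (sources usually empty here)
    yield return line.ContainsSources()
        ? new ContentStreamChunk(content.Content, line.GetSources())
        : content;
}
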
app/MindWork AI Studio/Provider/ContentStreamChunk.cs (new file, 16 lines)

@@ -0,0 +1,16 @@
+namespace AIStudio.Provider;
+
+/// <summary>
+/// A chunk of content from a content stream, along with its associated sources.
+/// </summary>
+/// <param name="Content">The text content of the chunk.</param>
+/// <param name="Sources">The list of sources associated with the chunk.</param>
+public sealed record ContentStreamChunk(string Content, IList<ISource> Sources)
+{
+    /// <summary>
+    /// Implicit conversion to string.
+    /// </summary>
+    /// <param name="chunk">The content stream chunk.</param>
+    /// <returns>The text content of the chunk.</returns>
+    public static implicit operator string(ContentStreamChunk chunk) => chunk.Content;
+}

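Because ContentStreamChunk converts implicitly to string, call sites that only care about the text keep working unchanged. A small illustrative snippet (the consumer code is assumed for this example, not part of the commit):

// Hypothetical consumer of the new type:
var chunk = new ContentStreamChunk("Hello, world.", [new Source("Example", "https://example.com")]);
string text = chunk;                       // implicit operator string -> "Hello, world."
var citationCount = chunk.Sources.Count;   // 1
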
@@ -20,7 +20,7 @@ public sealed class ProviderDeepSeek(ILogger logger) : BaseProvider("https://api
     public override string InstanceName { get; set; } = "DeepSeek";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -19,7 +19,7 @@ public class ProviderFireworks(ILogger logger) : BaseProvider("https://api.firew
     public override string InstanceName { get; set; } = "Fireworks.ai";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -14,7 +14,7 @@ public readonly record struct ResponseStreamLine(string Id, string Object, uint
     public bool ContainsContent() => this != default && this.Choices.Count > 0;
 
     /// <inheritdoc />
-    public string GetContent() => this.Choices[0].Delta.Content;
+    public ContentStreamChunk GetContent() => new(this.Choices[0].Delta.Content, []);
 }
 
 /// <summary>

@@ -20,7 +20,7 @@ public sealed class ProviderGWDG(ILogger logger) : BaseProvider("https://chat-ai
     public override string InstanceName { get; set; } = "GWDG SAIA";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -20,7 +20,7 @@ public class ProviderGoogle(ILogger logger) : BaseProvider("https://generativela
     public override string InstanceName { get; set; } = "Google Gemini";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -20,7 +20,7 @@ public class ProviderGroq(ILogger logger) : BaseProvider("https://api.groq.com/o
     public override string InstanceName { get; set; } = "Groq";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -20,7 +20,7 @@ public sealed class ProviderHelmholtz(ILogger logger) : BaseProvider("https://ap
     public override string InstanceName { get; set; } = "Helmholtz Blablador";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -25,7 +25,7 @@ public sealed class ProviderHuggingFace : BaseProvider
     public override string InstanceName { get; set; } = "HuggingFace";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -27,7 +27,7 @@ public interface IProvider
     /// <param name="settingsManager">The settings manager instance to use.</param>
     /// <param name="token">The cancellation token.</param>
     /// <returns>The chat completion stream.</returns>
-    public IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, CancellationToken token = default);
+    public IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, CancellationToken token = default);
 
     /// <summary>
     /// Starts an image completion stream.

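For callers of IProvider, the change is largely transparent thanks to the implicit string conversion. A minimal sketch of hypothetical consuming code (the provider, model, thread, settings, and token variables are placeholders for this example, not identifiers from the commit):

// Hypothetical caller: accumulate the answer text and collect all cited sources.
var answer = new System.Text.StringBuilder();
var sources = new List<ISource>();

await foreach (var chunk in provider.StreamChatCompletion(model, thread, settings, token))
{
    answer.Append(chunk);   // implicit ContentStreamChunk -> string conversion
    foreach (var source in chunk.Sources)
        sources.Add(source);
}
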
@@ -12,5 +12,17 @@ public interface IResponseStreamLine
     /// Gets the content of the response line.
     /// </summary>
     /// <returns>The content of the response line.</returns>
-    public string GetContent();
+    public ContentStreamChunk GetContent();
+
+    /// <summary>
+    /// Checks if the response line contains any sources.
+    /// </summary>
+    /// <returns>True when the response line contains sources, false otherwise.</returns>
+    public bool ContainsSources() => false;
+
+    /// <summary>
+    /// Gets the sources of the response line.
+    /// </summary>
+    /// <returns>The sources of the response line.</returns>
+    public IList<ISource> GetSources() => [];
 }

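ContainsSources and GetSources ship with default implementations, so the existing response-line structs compile unchanged; only providers that actually return citations need to override them. A hedged sketch of what such an override could look like (the CitedResponseStreamLine type and its Citations property are invented for illustration and do not appear in the commit; it assumes the project's implicit usings, e.g. System.Linq):

// Illustrative only: a response line that carries citation URLs next to the delta text.
public readonly record struct CitedResponseStreamLine(string Text, IList<string> Citations) : IResponseStreamLine
{
    public bool ContainsContent() => !string.IsNullOrWhiteSpace(this.Text);

    public ContentStreamChunk GetContent() => new(this.Text, this.GetSources());

    public bool ContainsSources() => this.Citations is { Count: > 0 };

    // Using the URL as the title is just a fallback for this sketch:
    public IList<ISource> GetSources() => this.Citations.Select(url => (ISource)new Source(url, url)).ToList();
}
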
app/MindWork AI Studio/Provider/ISource.cs (new file, 17 lines)

@@ -0,0 +1,17 @@
+namespace AIStudio.Provider;
+
+/// <summary>
+/// Data model for a source used in the response.
+/// </summary>
+public interface ISource
+{
+    /// <summary>
+    /// The title of the source.
+    /// </summary>
+    public string Title { get; }
+
+    /// <summary>
+    /// The URL of the source.
+    /// </summary>
+    public string URL { get; }
+}

@@ -18,7 +18,7 @@ public sealed class ProviderMistral(ILogger logger) : BaseProvider("https://api.
     public override string InstanceName { get; set; } = "Mistral";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -19,7 +19,7 @@ public class NoProvider : IProvider
 
     public Task<IEnumerable<Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default) => Task.FromResult<IEnumerable<Model>>([]);
 
-    public async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatChatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatChatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         await Task.FromResult(0);
         yield break;

@@ -22,7 +22,7 @@ public sealed class ProviderOpenAI(ILogger logger) : BaseProvider("https://api.o
     public override string InstanceName { get; set; } = "OpenAI";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -15,7 +15,7 @@ public readonly record struct ResponseStreamLine(string Id, string Object, uint
     public bool ContainsContent() => this != default && this.Choices.Count > 0;
 
     /// <inheritdoc />
-    public string GetContent() => this.Choices[0].Delta.Content;
+    public ContentStreamChunk GetContent() => new(this.Choices[0].Delta.Content, []);
 }
 
 /// <summary>

@@ -29,7 +29,7 @@ public sealed class ProviderPerplexity(ILogger logger) : BaseProvider("https://a
     public override string InstanceName { get; set; } = "Perplexity";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -18,7 +18,7 @@ public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostnam
     public override string InstanceName { get; set; } = "Self-hosted";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Provider.Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this, isTrying: true);

app/MindWork AI Studio/Provider/Source.cs (new file, 8 lines)

@@ -0,0 +1,8 @@
+namespace AIStudio.Provider;
+
+/// <summary>
+/// Data model for a source used in the response.
+/// </summary>
+/// <param name="Title">The title of the source.</param>
+/// <param name="URL">The URL of the source.</param>
+public record Source(string Title, string URL) : ISource;

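Source is the plain record implementation of ISource; its positional parameters generate the get-only Title and URL properties the interface requires. A short illustrative construction (values assumed for the example):

// Illustrative: attach a concrete source to a content chunk.
ISource source = new Source("MindWork AI Studio", "https://github.com/MindWorkAI/AI-Studio");
var chunk = new ContentStreamChunk("AI Studio is a desktop app.", [source]);
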
app/MindWork AI Studio/Provider/SourceExtensions.cs (new file, 35 lines)

@@ -0,0 +1,35 @@
+using System.Text;
+
+using AIStudio.Tools.PluginSystem;
+
+namespace AIStudio.Provider;
+
+public static class SourceExtensions
+{
+    private static string TB(string fallbackEN) => I18N.I.T(fallbackEN, typeof(SourceExtensions).Namespace, nameof(SourceExtensions));
+
+    /// <summary>
+    /// Converts a list of sources to a markdown-formatted string.
+    /// </summary>
+    /// <param name="sources">The list of sources to convert.</param>
+    /// <returns>A markdown-formatted string representing the sources.</returns>
+    public static string ToMarkdown(this IList<Source> sources)
+    {
+        var sb = new StringBuilder();
+        sb.Append("## ");
+        sb.AppendLine(TB("Sources"));
+
+        var sourceNum = 0;
+        foreach (var source in sources)
+        {
+            sb.Append($"- [{++sourceNum}] ");
+            sb.Append('[');
+            sb.Append(source.Title);
+            sb.Append("](");
+            sb.Append(source.URL);
+            sb.AppendLine(")");
+        }
+
+        return sb.ToString();
+    }
+}

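ToMarkdown renders the collected sources as a numbered markdown list under a localized "Sources" heading. An illustrative usage (input values assumed; output shown for the English fallback text):

// Illustrative usage of the new extension method:
IList<Source> sources =
[
    new("Paper A", "https://example.org/a"),
    new("Paper B", "https://example.org/b"),
];
var markdown = sources.ToMarkdown();
// markdown now contains:
// ## Sources
// - [1] [Paper A](https://example.org/a)
// - [2] [Paper B](https://example.org/b)
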
@@ -20,7 +20,7 @@ public sealed class ProviderX(ILogger logger) : BaseProvider("https://api.x.ai/v
     public override string InstanceName { get; set; } = "xAI";
 
     /// <inheritdoc />
-    public override async IAsyncEnumerable<string> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
+    public override async IAsyncEnumerable<ContentStreamChunk> StreamChatCompletion(Model chatModel, ChatThread chatThread, SettingsManager settingsManager, [EnumeratorCancellation] CancellationToken token = default)
     {
         // Get the API key:
         var requestedSecret = await RUST_SERVICE.GetAPIKey(this);

@@ -2,6 +2,7 @@
 - Added support for predefined chat templates in configuration plugins to help enterprises roll out consistent templates across the organization.
 - Added the ability to choose between automatic and manual update installation to the app settings (default is manual).
 - Added the ability to control the update installation behavior by configuration plugins.
+- Added the option for LLM providers to return citations.
 - Improved memory usage in several areas of the app.
 - Improved plugin management for configuration plugins so that hot reload detects when a provider or chat template has been removed.
 - Improved the dialog for naming chats and workspaces to ensure valid inputs are entered.