mirror of
https://github.com/MindWorkAI/AI-Studio.git
synced 2025-09-18 19:00:22 +00:00
Some checks are pending
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-unknown-linux-gnu, linux-x64, ubuntu-22.04, x86_64-unknown-linux-gnu, appimage deb updater) (push) Blocked by required conditions
Build and Release / Prepare & create release (push) Blocked by required conditions
Build and Release / Read metadata (push) Waiting to run
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-apple-darwin, osx-arm64, macos-latest, aarch64-apple-darwin, dmg updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-pc-windows-msvc.exe, win-arm64, windows-latest, aarch64-pc-windows-msvc, nsis updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-aarch64-unknown-linux-gnu, linux-arm64, ubuntu-22.04-arm, aarch64-unknown-linux-gnu, appimage deb updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-apple-darwin, osx-x64, macos-latest, x86_64-apple-darwin, dmg updater) (push) Blocked by required conditions
Build and Release / Build app (${{ matrix.dotnet_runtime }}) (-x86_64-pc-windows-msvc.exe, win-x64, windows-latest, x86_64-pc-windows-msvc, nsis updater) (push) Blocked by required conditions
Build and Release / Publish release (push) Blocked by required conditions
45 lines
1.9 KiB
C#
45 lines
1.9 KiB
C#
namespace AIStudio.Provider.OpenAI;
|
|
|
|
/// <summary>
/// Represents a single delta line within a streamed chat completion response.
/// </summary>
/// <param name="Id">The unique identifier of the response.</param>
/// <param name="Object">The object type describing this response.</param>
/// <param name="Created">The Unix timestamp at which the response was created.</param>
/// <param name="Model">The name of the model that produced this response.</param>
/// <param name="SystemFingerprint">The system fingerprint; combined with the seed, it allows reproducing the response.</param>
/// <param name="Choices">The list of choices produced by the AI.</param>
public record ChatCompletionDeltaStreamLine(string Id, string Object, uint Created, string Model, string SystemFingerprint, IList<ChatCompletionChoice> Choices) : IResponseStreamLine
{
    /// <summary>
    /// Creates an empty delta line with default values, e.g., for deserialization.
    /// </summary>
    public ChatCompletionDeltaStreamLine() : this(string.Empty, string.Empty, 0, string.Empty, string.Empty, [])
    {
    }

    /// <inheritdoc />
    public bool ContainsContent()
    {
        // A delta line carries content exactly when at least one choice is present.
        return this.Choices.Count != 0;
    }

    /// <inheritdoc />
    public ContentStreamChunk GetContent()
    {
        // Callers are expected to check ContainsContent() first; only the
        // first choice's delta content is forwarded, without any sources.
        var firstChoice = this.Choices[0];
        return new(firstChoice.Delta.Content, []);
    }

    #region Implementation of IAnnotationStreamLine

    //
    // Please note that LLM providers might stream sources in several ways:
    //
    // - As part of the delta content while streaming. That case would be handled by this class.
    // - Via a dedicated stream event and data structure. That case would be handled by another
    //   class implementing IResponseStreamLine.
    //
    // Right now, OpenAI uses the latter approach, so no sources appear here. And because no
    // other provider does it yet either, there is no implementation here for now.
    //
    // One example where sources arrive as part of the delta content is the Perplexity provider.
    //

    /// <inheritdoc />
    public bool ContainsSources()
    {
        return false;
    }

    /// <inheritdoc />
    public IList<ISource> GetSources()
    {
        return [];
    }

    #endregion
}