Mirror of https://github.com/MindWorkAI/AI-Studio.git (synced 2025-04-28 15:39:46 +00:00)

Added Fireworks as provider
commit e21d734cdd
parent a2009137f7
app/MindWork AI Studio/Provider/Fireworks/ChatRequest.cs (new file, 13 lines)
@@ -0,0 +1,13 @@
namespace AIStudio.Provider.Fireworks;

/// <summary>
/// The Fireworks chat request model.
/// </summary>
/// <param name="Model">Which model to use for chat completion.</param>
/// <param name="Messages">The chat messages.</param>
/// <param name="Stream">Whether to stream the chat completion.</param>
public readonly record struct ChatRequest(
    string Model,
    IList<Message> Messages,
    bool Stream
);
app/MindWork AI Studio/Provider/Fireworks/Message.cs (new file, 8 lines)
@@ -0,0 +1,8 @@
namespace AIStudio.Provider.Fireworks;

/// <summary>
/// Chat message model.
/// </summary>
/// <param name="Content">The text content of the message.</param>
/// <param name="Role">The role of the message.</param>
public readonly record struct Message(string Content, string Role);
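For illustration only (not part of the commit): ProviderFireworks.cs below serializes these two records with System.Text.Json's SnakeCaseLower naming policy, so the request body on the wire would look roughly like the comment at the end of this sketch; the model id is a made-up placeholder.

using System.Text.Json;
using AIStudio.Provider.Fireworks;

// Same naming policy that ProviderFireworks applies to its requests:
var options = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower };

var request = new ChatRequest
{
    Model = "accounts/fireworks/models/llama-v3p1-405b-instruct", // placeholder model id
    Messages =
    [
        new Message { Role = "system", Content = "You are a helpful assistant." },
        new Message { Role = "user", Content = "Hello!" },
    ],
    Stream = true,
};

var json = JsonSerializer.Serialize(request, options);
// json == {"model":"accounts/fireworks/models/llama-v3p1-405b-instruct",
//          "messages":[{"content":"You are a helpful assistant.","role":"system"},
//                      {"content":"Hello!","role":"user"}],
//          "stream":true}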
app/MindWork AI Studio/Provider/Fireworks/ProviderFireworks.cs (new file, 160 lines)
@@ -0,0 +1,160 @@
using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;

using AIStudio.Chat;
using AIStudio.Settings;

namespace AIStudio.Provider.Fireworks;

public class ProviderFireworks() : BaseProvider("https://api.fireworks.ai/inference/v1/"), IProvider
{
    private static readonly JsonSerializerOptions JSON_SERIALIZER_OPTIONS = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
    };

    #region Implementation of IProvider

    /// <inheritdoc />
    public string Id => "Fireworks.ai";

    /// <inheritdoc />
    public string InstanceName { get; set; } = "Fireworks.ai";

    /// <inheritdoc />
    public async IAsyncEnumerable<string> StreamChatCompletion(IJSRuntime jsRuntime, SettingsManager settings, Model chatModel, ChatThread chatThread, [EnumeratorCancellation] CancellationToken token = default)
    {
        // Get the API key:
        var requestedSecret = await settings.GetAPIKey(jsRuntime, this);
        if(!requestedSecret.Success)
            yield break;

        // Prepare the system prompt:
        var systemPrompt = new Message
        {
            Role = "system",
            Content = chatThread.SystemPrompt,
        };

        // Prepare the Fireworks HTTP chat request:
        var fireworksChatRequest = JsonSerializer.Serialize(new ChatRequest
        {
            Model = chatModel.Id,

            // Build the messages:
            // - First of all the system prompt
            // - Then non-empty user and AI messages
            Messages = [systemPrompt, ..chatThread.Blocks.Where(n => n.ContentType is ContentType.TEXT && !string.IsNullOrWhiteSpace((n.Content as ContentText)?.Text)).Select(n => new Message
            {
                Role = n.Role switch
                {
                    ChatRole.USER => "user",
                    ChatRole.AI => "assistant",
                    ChatRole.SYSTEM => "system",

                    _ => "user",
                },

                Content = n.Content switch
                {
                    ContentText text => text.Text,
                    _ => string.Empty,
                }
            }).ToList()],

            // Right now, we only support streaming completions:
            Stream = true,
        }, JSON_SERIALIZER_OPTIONS);

        // Build the HTTP post request:
        var request = new HttpRequestMessage(HttpMethod.Post, "chat/completions");

        // Set the authorization header:
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", requestedSecret.Secret);

        // Set the content:
        request.Content = new StringContent(fireworksChatRequest, Encoding.UTF8, "application/json");

        // Send the request with the ResponseHeadersRead option.
        // This allows us to read the stream as soon as the headers are received.
        // This is important because we want to stream the responses.
        var response = await this.httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, token);

        // Open the response stream:
        var fireworksStream = await response.Content.ReadAsStreamAsync(token);

        // Add a stream reader to read the stream, line by line:
        var streamReader = new StreamReader(fireworksStream);

        // Read the stream, line by line:
        while(!streamReader.EndOfStream)
        {
            // Check if the token is canceled:
            if(token.IsCancellationRequested)
                yield break;

            // Read the next line:
            var line = await streamReader.ReadLineAsync(token);

            // Skip empty lines:
            if(string.IsNullOrWhiteSpace(line))
                continue;

            // Skip lines that do not start with "data: ". According
            // to the specification, we only want to read the data lines:
            if(!line.StartsWith("data: ", StringComparison.InvariantCulture))
                continue;

            // Check if the line is the end of the stream:
            if (line.StartsWith("data: [DONE]", StringComparison.InvariantCulture))
                yield break;

            ResponseStreamLine fireworksResponse;
            try
            {
                // We know that the line starts with "data: ". Hence, we can
                // skip the first 6 characters to get the JSON data after that.
                var jsonData = line[6..];

                // Deserialize the JSON data:
                fireworksResponse = JsonSerializer.Deserialize<ResponseStreamLine>(jsonData, JSON_SERIALIZER_OPTIONS);
            }
            catch
            {
                // Skip invalid JSON data:
                continue;
            }

            // Skip empty responses:
            if(fireworksResponse == default || fireworksResponse.Choices.Count == 0)
                continue;

            // Yield the response:
            yield return fireworksResponse.Choices[0].Delta.Content;
        }
    }

    #pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
    /// <inheritdoc />
    public async IAsyncEnumerable<ImageURL> StreamImageCompletion(IJSRuntime jsRuntime, SettingsManager settings, Model imageModel, string promptPositive, string promptNegative = FilterOperator.String.Empty, ImageURL referenceImageURL = default, [EnumeratorCancellation] CancellationToken token = default)
    {
        yield break;
    }
    #pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously

    /// <inheritdoc />
    public Task<IEnumerable<Model>> GetTextModels(IJSRuntime jsRuntime, SettingsManager settings, string? apiKeyProvisional = null, CancellationToken token = default)
    {
        return Task.FromResult(Enumerable.Empty<Model>());
    }

    /// <inheritdoc />
    public Task<IEnumerable<Model>> GetImageModels(IJSRuntime jsRuntime, SettingsManager settings, string? apiKeyProvisional = null, CancellationToken token = default)
    {
        return Task.FromResult(Enumerable.Empty<Model>());
    }

    #endregion
}
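A hypothetical consumption sketch (not part of the commit), based only on the StreamChatCompletion signature above; the jsRuntime, settings, chatModel, chatThread, and token values are assumed to come from the app's existing state.

using System.Text;
using AIStudio.Provider;
using AIStudio.Provider.Fireworks;

// Hypothetical usage: stream a chat completion and collect the text deltas as they arrive.
IProvider provider = new ProviderFireworks { InstanceName = "Fireworks.ai" };

var answer = new StringBuilder();
await foreach (var delta in provider.StreamChatCompletion(jsRuntime, settings, chatModel, chatThread, token))
    answer.Append(delta);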
@@ -0,0 +1,24 @@
namespace AIStudio.Provider.Fireworks;

/// <summary>
/// Data model for a line in the response stream, for streaming completions.
/// </summary>
/// <param name="Id">The id of the response.</param>
/// <param name="Object">The object describing the response.</param>
/// <param name="Created">The timestamp of the response.</param>
/// <param name="Model">The model used for the response.</param>
/// <param name="Choices">The choices made by the AI.</param>
public readonly record struct ResponseStreamLine(string Id, string Object, uint Created, string Model, IList<Choice> Choices);

/// <summary>
/// Data model for a choice made by the AI.
/// </summary>
/// <param name="Index">The index of the choice.</param>
/// <param name="Delta">The delta text of the choice.</param>
public readonly record struct Choice(int Index, Delta Delta);

/// <summary>
/// The delta text of a choice.
/// </summary>
/// <param name="Content">The content of the delta text.</param>
public readonly record struct Delta(string Content);
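For illustration (an assumption, not taken from the commit): the streaming endpoint is treated here as OpenAI-compatible, so a single "data: " line that ProviderFireworks deserializes into ResponseStreamLine would look roughly like the sketch below; all field values are invented.

using System.Text.Json;
using AIStudio.Provider.Fireworks;

// Illustrative sketch, not part of the commit; the payload is made up.
// A streamed line, assuming the OpenAI-style chunk format, arrives as:
//
//   data: {"id":"cmpl-123","object":"chat.completion.chunk","created":1721000000,
//          "model":"accounts/fireworks/models/llama-v3p1-405b-instruct",
//          "choices":[{"index":0,"delta":{"content":"Hello"}}]}
//
// ProviderFireworks strips the "data: " prefix and deserializes the remainder with
// the snake_case options, which maps onto ResponseStreamLine/Choice/Delta:
var json = """{"id":"cmpl-123","object":"chat.completion.chunk","created":1721000000,"model":"m","choices":[{"index":0,"delta":{"content":"Hello"}}]}""";
var chunk = JsonSerializer.Deserialize<ResponseStreamLine>(json,
    new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower });
// chunk.Choices[0].Delta.Content == "Hello"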
@@ -1,4 +1,5 @@
 using AIStudio.Provider.Anthropic;
+using AIStudio.Provider.Fireworks;
 using AIStudio.Provider.Mistral;
 using AIStudio.Provider.OpenAI;
 using AIStudio.Provider.SelfHosted;
@@ -10,13 +11,15 @@ namespace AIStudio.Provider;
 /// </summary>
 public enum Providers
 {
-    NONE,
+    NONE = 0,
     
-    OPEN_AI,
-    ANTHROPIC,
-    MISTRAL,
+    OPEN_AI = 1,
+    ANTHROPIC = 2,
+    MISTRAL = 3,
     
-    SELF_HOSTED,
+    FIREWORKS = 5,
+    
+    SELF_HOSTED = 4,
 }
 
 /// <summary>
@@ -37,6 +40,8 @@ public static class ExtensionsProvider
         Providers.ANTHROPIC => "Anthropic",
         Providers.MISTRAL => "Mistral",
         
+        Providers.FIREWORKS => "Fireworks.ai",
+        
         Providers.SELF_HOSTED => "Self-hosted",
         
         _ => "Unknown",
@@ -56,9 +61,11 @@ public static class ExtensionsProvider
         Providers.OPEN_AI => new ProviderOpenAI { InstanceName = providerSettings.InstanceName },
         Providers.ANTHROPIC => new ProviderAnthropic { InstanceName = providerSettings.InstanceName },
         Providers.MISTRAL => new ProviderMistral { InstanceName = providerSettings.InstanceName },
         
+        Providers.FIREWORKS => new ProviderFireworks { InstanceName = providerSettings.InstanceName },
+        
         Providers.SELF_HOSTED => new ProviderSelfHosted(providerSettings) { InstanceName = providerSettings.InstanceName },
         
         _ => new NoProvider(),
     };
 }
@@ -1,5 +1,6 @@
 # v0.8.3 (WIP)
-- Migrated UI framework from MudBlazor v6.x.x to v7.x.x
 - Added an option to configure the behavior of the navigation bar in the settings
+- Added support for Fireworks.ai as a provider, where you can use, e.g., the Llama 3.1 405B model
 - Improved the handling of self-hosted provider hostnames
 - Improved the configured provider table: long model names are now truncated
+- Migrated UI framework from MudBlazor v6.x.x to v7.x.x