Improved LM Studio handling by querying embedding models

Thorsten Sommer 2024-12-03 15:15:35 +01:00
parent eae33e5271
commit 492cf4ac6e
Signed by: tsommer
GPG Key ID: 371BBA77A02C0108
3 changed files with 48 additions and 26 deletions


@@ -56,7 +56,7 @@
</MudSelect>
<MudStack Row="@true" AlignItems="AlignItems.Center">
-   @if (this.DataLLMProvider.IsEmbeddingModelProvidedManually())
+   @if (this.DataLLMProvider.IsEmbeddingModelProvidedManually(this.DataHost))
    {
        <MudTextField
            T="string"


@@ -193,9 +193,9 @@ public static class LLMProvidersExtensions
        _ => false,
    };

-   public static bool IsEmbeddingModelProvidedManually(this LLMProviders provider) => provider switch
+   public static bool IsEmbeddingModelProvidedManually(this LLMProviders provider, Host host) => provider switch
    {
-       LLMProviders.SELF_HOSTED => true,
+       LLMProviders.SELF_HOSTED => host is not Host.LM_STUDIO,
        _ => false,
    };
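A self-contained sketch (not part of the commit) of what the added Host parameter changes for the self-hosted provider. The enums below are stubs limited to the members visible in this diff, and the sketch class name is made up; the method body is copied from the change above.

// Sketch only: enum stubs and class name are hypothetical, the switch is from the diff.
using System;

Console.WriteLine(LLMProviders.SELF_HOSTED.IsEmbeddingModelProvidedManually(Host.LM_STUDIO)); // False: LM Studio is queried for embedding models
Console.WriteLine(LLMProviders.SELF_HOSTED.IsEmbeddingModelProvidedManually(Host.OLLAMA));    // True: the embedding model is still entered manually

public enum LLMProviders { SELF_HOSTED }
public enum Host { LM_STUDIO, OLLAMA }

public static class LLMProvidersExtensionsSketch
{
    // Same switch expression as in the change above.
    public static bool IsEmbeddingModelProvidedManually(this LLMProviders provider, Host host) => provider switch
    {
        LLMProviders.SELF_HOSTED => host is not Host.LM_STUDIO,
        _ => false,
    };
}

In effect, LM Studio no longer requires the user to type an embedding model name, because the server can be queried for its embedding models (see the new GetEmbeddingModels below), while other self-hosted hosts such as Ollama keep the manual text field.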


@@ -168,7 +168,48 @@ public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostnam
                case Host.LM_STUDIO:
                case Host.OLLAMA:
                    return await this.LoadModels(["embed"], [], token, apiKeyProvisional);
            }

            return [];
        }
        catch(Exception e)
        {
            this.logger.LogError($"Failed to load text models from self-hosted provider: {e.Message}");
            return [];
        }
    }

    /// <inheritdoc />
    public override Task<IEnumerable<Provider.Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        return Task.FromResult(Enumerable.Empty<Provider.Model>());
    }

    public override async Task<IEnumerable<Provider.Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
    {
        try
        {
            switch (host)
            {
                case Host.LM_STUDIO:
                case Host.OLLAMA:
                    return await this.LoadModels([], ["embed"], token, apiKeyProvisional);
            }

            return [];
        }
        catch(Exception e)
        {
            this.logger.LogError($"Failed to load text models from self-hosted provider: {e.Message}");
            return [];
        }
    }

    #endregion

    private async Task<IEnumerable<Provider.Model>> LoadModels(string[] ignorePhrases, string[] filterPhrases, CancellationToken token, string? apiKeyProvisional = null)
    {
        var secretKey = apiKeyProvisional switch
        {
            not null => apiKeyProvisional,
@@ -188,28 +229,9 @@ public sealed class ProviderSelfHosted(ILogger logger, Host host, string hostnam
            return [];

        var lmStudioModelResponse = await lmStudioResponse.Content.ReadFromJsonAsync<ModelsResponse>(token);
-       return lmStudioModelResponse.Data.Where(n => !n.Id.Contains("embed")).Select(n => new Provider.Model(n.Id, null));
+       return lmStudioModelResponse.Data.
+           Where(model => !ignorePhrases.Any(ignorePhrase => model.Id.Contains(ignorePhrase, StringComparison.InvariantCulture)) &&
+                  filterPhrases.All( filter => model.Id.Contains(filter, StringComparison.InvariantCulture)))
+           .Select(n => new Provider.Model(n.Id, null));
    }
-           return [];
-       }
-       catch(Exception e)
-       {
-           this.logger.LogError($"Failed to load text models from self-hosted provider: {e.Message}");
-           return [];
-       }
-   }
-   /// <inheritdoc />
-   public override Task<IEnumerable<Provider.Model>> GetImageModels(string? apiKeyProvisional = null, CancellationToken token = default)
-   {
-       return Task.FromResult(Enumerable.Empty<Provider.Model>());
-   }
-   public override Task<IEnumerable<Provider.Model>> GetEmbeddingModels(string? apiKeyProvisional = null, CancellationToken token = default)
-   {
-       return Task.FromResult(Enumerable.Empty<Provider.Model>());
-   }
-   #endregion
}
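A standalone sketch (not from the repository) of the generalized filter in LoadModels, applied to made-up model IDs. The HTTP request and the Provider.Model type are omitted, and the Filter helper exists only for this illustration; it shows how the same helper now serves both callers: GetTextModels passes "embed" as an ignore phrase, GetEmbeddingModels passes it as a filter phrase.

// Sketch only: model IDs and the Filter helper are made up; the predicate is from the diff.
using System;
using System.Collections.Generic;
using System.Linq;

string[] modelIds =
[
    "llama-3.2-3b-instruct",                  // chat model
    "text-embedding-nomic-embed-text-v1.5",   // embedding model
    "qwen2.5-coder-7b",                       // chat model
];

// GetTextModels -> LoadModels(["embed"], [], ...): drop every ID that contains "embed".
var textModels = Filter(modelIds, ignorePhrases: ["embed"], filterPhrases: []);

// GetEmbeddingModels -> LoadModels([], ["embed"], ...): keep only IDs that contain "embed".
var embeddingModels = Filter(modelIds, ignorePhrases: [], filterPhrases: ["embed"]);

Console.WriteLine(string.Join(", ", textModels));      // llama-3.2-3b-instruct, qwen2.5-coder-7b
Console.WriteLine(string.Join(", ", embeddingModels)); // text-embedding-nomic-embed-text-v1.5

// Same predicate as in the diff, minus the surrounding HTTP plumbing.
static IEnumerable<string> Filter(string[] ids, string[] ignorePhrases, string[] filterPhrases) =>
    ids.Where(id =>
        !ignorePhrases.Any(ignorePhrase => id.Contains(ignorePhrase, StringComparison.InvariantCulture)) &&
        filterPhrases.All(filter => id.Contains(filter, StringComparison.InvariantCulture)));

Passing the phrases in as parameters lets the text-model and embedding-model queries share one request and parsing path instead of duplicating it around a hard-coded Contains("embed") check.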