Skip to content

Commit 08007b9

Browse files
author
Piotr Stachaczynski
committed
feat: fix namings
1 parent 4960429 commit 08007b9

2 files changed

Lines changed: 5 additions & 5 deletions

File tree

src/MaIN.Domain/Models/SupportedModels.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -91,12 +91,12 @@ public static Model GetEmbeddingModel() =>
9191
new()
9292
{
9393
Name = KnownModelNames.Nomic_Embedding,
94-
FileName = "nomic-maIN.gguf",
94+
FileName = "nomic.gguf",
9595
Description = "Model used to generate embeddings.",
9696
DownloadUrl = string.Empty,
9797
};
9898

99-
public static Model GetModel(string path, string name)
99+
public static Model GetModel(string? path, string name)
100100
{
101101
var isPresent = Models.TryGetValue(name, out var model);
102102
if (!isPresent)

src/MaIN.Services/Services/LLMService/LLMService.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -276,7 +276,7 @@ private void AddMessagesToHistory(ChatSession session, List<Message> messages)
276276

277277

278278
[Experimental("KMEXP01")]
279-
private static IKernelMemory CreateMemory(string modelName, string path,
279+
private static IKernelMemory CreateMemory(string modelName, string? path,
280280
out KernelMemFix.LlamaSharpTextGenerator generator)
281281
{
282282
InferenceParams infParams = new() { AntiPrompts = ["INFO", "<|im_end|>", "Question:"] };
@@ -307,7 +307,7 @@ private static IKernelMemory CreateMemory(string modelName, string path,
307307
.Build();
308308
}
309309

310-
internal static async Task<LLamaWeights> GetOrLoadModelAsync(string path, string modelKey)
310+
internal static async Task<LLamaWeights> GetOrLoadModelAsync(string? path, string modelKey)
311311
{
312312
if (modelCache.TryGetValue(modelKey, out var cachedModel))
313313
{
@@ -452,7 +452,7 @@ public static IKernelMemoryBuilder WithLLamaSharpTextGeneration(
452452

453453
[Experimental("KMEXP01")]
454454
public static IKernelMemoryBuilder WithLLamaSharpMaINTemp(this IKernelMemoryBuilder builder,
455-
LLamaSharpConfig config, string path, string modelName, out LlamaSharpTextGenerator generator)
455+
LLamaSharpConfig config, string? path, string modelName, out LlamaSharpTextGenerator generator)
456456
{
457457
// Load the first model with caching.
458458
var model = LLMService.GetOrLoadModelAsync(path, modelName).Result;

0 commit comments

Comments (0)