Merged
Changes from all commits
3 changes: 3 additions & 0 deletions Releases/0.7.10.md
@@ -0,0 +1,3 @@
# 0.7.10 release

- Fix changeOfValue not returning tokens when using the .WithFiles() method.
2 changes: 1 addition & 1 deletion src/MaIN.Core/.nuspec
@@ -2,7 +2,7 @@
<package>
<metadata>
<id>MaIN.NET</id>
<version>0.7.9</version>
<version>0.7.10</version>
<authors>Wisedev</authors>
<owners>Wisedev</owners>
<icon>favicon.png</icon>
1 change: 1 addition & 0 deletions src/MaIN.Services/Services/Abstract/ILLMService.cs
@@ -32,6 +32,7 @@ public interface ILLMService
/// <returns></returns>
Task<ChatResult?> AskMemory(Chat chat,
ChatMemoryOptions memoryOptions,
ChatRequestOptions requestOptions,
CancellationToken cancellationToken = default);

/// <summary>
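The interface change above threads a new `ChatRequestOptions` parameter through every `AskMemory` implementation. Below is a minimal call-site sketch; it assumes only the shapes visible in this diff (`InteractiveUpdates`, `TokenCallback`, `LLMTokenValue.Text`, and the `ChatMemoryOptions` properties), while the surrounding program is illustrative.

```csharp
// Hedged sketch: llmService and chat come from the host application;
// the option shapes are taken from this diff, nothing else is guaranteed.
var result = await llmService.AskMemory(
    chat,
    new ChatMemoryOptions { WebUrls = ["https://example.com/doc"] },
    new ChatRequestOptions
    {
        InteractiveUpdates = false,                 // no hub notifications
        TokenCallback = t => Console.Write(t.Text)  // consume each streamed chunk
    },
    CancellationToken.None);
```

Call sites that do not need streaming, like the command handlers later in this diff, pass `new ChatRequestOptions()` and keep the blocking behavior.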
4 changes: 2 additions & 2 deletions src/MaIN.Services/Services/LLMService/AnthropicService.cs
@@ -71,7 +71,7 @@
if (HasFiles(lastMessage))
{
var result = ChatHelper.ExtractMemoryOptions(lastMessage);
var memoryResult = await AskMemory(chat, result, cancellationToken);
var memoryResult = await AskMemory(chat, result, options, cancellationToken);
resultBuilder.Append(memoryResult!.Message.Content);
lastMessage.MarkProcessed();
UpdateSessionCache(chat.Id, resultBuilder.ToString(), options.CreateSession);
@@ -488,7 +488,7 @@
{
requestBody["tools"] = chat.ToolsConfiguration.Tools.Select(t => new
{
name = t.Function.Name,

[GitHub Actions / build warning at line 491 in src/MaIN.Services/Services/LLMService/AnthropicService.cs: Dereference of a possibly null reference.]
description = t.Function.Description,
input_schema = t.Function.Parameters
}).ToList();
@@ -531,7 +531,7 @@
return messages;
}

public async Task<ChatResult?> AskMemory(Chat chat, ChatMemoryOptions memoryOptions, CancellationToken cancellationToken = default)
public async Task<ChatResult?> AskMemory(Chat chat, ChatMemoryOptions memoryOptions, ChatRequestOptions requestOptions, CancellationToken cancellationToken = default)
{
throw new NotSupportedException("Embeddings are not supported by the Anthropic backend. Document reading requires embedding support.");
}
@@ -824,9 +824,9 @@

file class AnthropicModelListResponse
{
public List<AnthropicModelInfo> Data { get; set; }

[GitHub Actions / build warning at line 827 in src/MaIN.Services/Services/LLMService/AnthropicService.cs: Non-nullable property 'Data' must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring the property as nullable.]
}

file class AnthropicModelInfo
{
public string Id { get; set; }

[GitHub Actions / build warning at line 832 in src/MaIN.Services/Services/LLMService/AnthropicService.cs: Non-nullable property 'Id' must contain a non-null value when exiting constructor. Consider adding the 'required' modifier or declaring the property as nullable.]
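Both nullability warnings name their own remedies; the sketch below shows the two options the compiler suggests. Neither is applied in this PR.

```csharp
// Option A: 'required' forces object initializers (and aware serializers)
// to assign the property, so it is never null on construction.
file class AnthropicModelInfo
{
    public required string Id { get; set; }
}

// Option B: declare the property nullable so an unset value is legal
// and callers must check for null.
file class AnthropicModelListResponse
{
    public List<AnthropicModelInfo>? Data { get; set; }
}
```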
1 change: 1 addition & 0 deletions src/MaIN.Services/Services/LLMService/DeepSeekService.cs
@@ -48,6 +48,7 @@ protected override void ValidateApiKey()
public override async Task<ChatResult?> AskMemory(
Chat chat,
ChatMemoryOptions memoryOptions,
ChatRequestOptions requestOptions,
CancellationToken cancellationToken = default)
{
var lastMsg = chat.Messages.Last();
63 changes: 61 additions & 2 deletions src/MaIN.Services/Services/LLMService/GeminiService.cs
@@ -1,4 +1,5 @@
using MaIN.Domain.Configuration;
using System.Text;
using MaIN.Domain.Configuration;
using MaIN.Services.Constants;
using MaIN.Services.Services.Abstract;
using MaIN.Services.Services.LLMService.Memory;
@@ -8,6 +9,7 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using MaIN.Domain.Entities;
using MaIN.Domain.Models;
using MaIN.Services.Utils;

namespace MaIN.Services.Services.LLMService;
@@ -19,7 +21,7 @@
IMemoryFactory memoryFactory,
IMemoryService memoryService,
ILogger<GeminiService>? logger = null)
: OpenAiCompatibleService(notificationService, httpClientFactory, memoryFactory, memoryService, logger)

[GitHub Actions / build warning at line 24 in src/MaIN.Services/Services/LLMService/GeminiService.cs: Parameter 'INotificationService notificationService' is captured into the state of the enclosing type and its value is also passed to the base constructor. The value might be captured by the base class as well.]
{
private readonly MaINSettings _settings = settings ?? throw new ArgumentNullException(nameof(settings));
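The build warning condensed above is the C# primary-constructor capture diagnostic (CS9107): `notificationService` is forwarded to the base constructor and then used again inside `GeminiService.AskMemory`, so both the derived and base type may end up holding a copy. A minimal illustration with invented types, plus the usual remedy:

```csharp
// Invented types for illustration only; not from this repository.
public class Base(IDisposable svc)
{
    protected IDisposable Svc { get; } = svc;
}

public class Derived(IDisposable svc) : Base(svc)  // svc passed to base...
{
    public void Use() => svc.Dispose();            // ...and captured here too -> CS9107
}

// Remedy: read the value back through the base member instead of
// capturing the constructor parameter a second time.
public class DerivedFixed(IDisposable svc) : Base(svc)
{
    public void Use() => Svc.Dispose();
}
```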

@@ -70,6 +72,7 @@
public override async Task<ChatResult?> AskMemory(
Chat chat,
ChatMemoryOptions memoryOptions,
ChatRequestOptions requestOptions,
CancellationToken cancellationToken = default)
{
if (!chat.Messages.Any())
@@ -88,7 +91,63 @@
$"{userQuery} | For your next response only, please respond using exactly the following JSON format: \n{jsonGrammar}\n. Do not include any explanations, code blocks, or additional content. After this single JSON response, resume your normal conversational style.";
}

var retrievedContext = await kernel.AskAsync(userQuery, cancellationToken: cancellationToken);
MemoryAnswer retrievedContext;

if (requestOptions.InteractiveUpdates || requestOptions.TokenCallback != null)
{
var responseBuilder = new StringBuilder();

var searchOptions = new SearchOptions
{
Stream = true
};

await foreach (var chunk in kernel.AskStreamingAsync(
userQuery,
options: searchOptions,
cancellationToken: cancellationToken))
{
if (!string.IsNullOrEmpty(chunk.Result))
{
responseBuilder.Append(chunk.Result);

var tokenValue = new LLMTokenValue
{
Text = chunk.Result,
Type = TokenType.Message
};

if (requestOptions.InteractiveUpdates)
{
await notificationService.DispatchNotification(
NotificationMessageBuilder.CreateChatCompletion(chat.Id, tokenValue, false),
ServiceConstants.Notifications.ReceiveMessageUpdate);
}

requestOptions.TokenCallback?.Invoke(tokenValue);
}
}

retrievedContext = new MemoryAnswer
{
Question = userQuery,
Result = responseBuilder.ToString(),
NoResult = responseBuilder.Length == 0
};
}
else
{
var searchOptions = new SearchOptions
{
Stream = false
};

retrievedContext = await kernel.AskAsync(
userQuery,
options: searchOptions,
cancellationToken: cancellationToken);
}

chat.Messages.Last().MarkProcessed();
await kernel.DeleteIndexAsync(cancellationToken: cancellationToken);
return CreateChatResult(chat, retrievedContext.Result, []);
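The stream-or-block branch added here reappears almost verbatim in `LLMService.AskMemory` and `OpenAiCompatibleService.AskMemory` below. A hedged refactoring sketch follows; it assumes `kernel` implements Kernel Memory's `IKernelMemory` and reuses only calls already present in this diff (`AskAsync`, `AskStreamingAsync`, `SearchOptions`, `MemoryAnswer`). The helper itself is not part of this PR.

```csharp
// Hypothetical shared helper the three AskMemory overrides could call.
private static async Task<MemoryAnswer> AskWithOptionalStreamingAsync(
    IKernelMemory kernel,
    string query,
    ChatRequestOptions requestOptions,
    Func<LLMTokenValue, Task> notify,  // wraps each service's notification dispatch
    CancellationToken ct)
{
    if (!requestOptions.InteractiveUpdates && requestOptions.TokenCallback is null)
    {
        // No consumer for incremental tokens: keep the blocking call.
        return await kernel.AskAsync(query,
            options: new SearchOptions { Stream = false }, cancellationToken: ct);
    }

    var sb = new StringBuilder();
    await foreach (var chunk in kernel.AskStreamingAsync(query,
        options: new SearchOptions { Stream = true }, cancellationToken: ct))
    {
        if (string.IsNullOrEmpty(chunk.Result)) continue;
        sb.Append(chunk.Result);

        var token = new LLMTokenValue { Text = chunk.Result, Type = TokenType.Message };
        if (requestOptions.InteractiveUpdates) await notify(token);
        requestOptions.TokenCallback?.Invoke(token);
    }

    return new MemoryAnswer
    {
        Question = query,
        Result = sb.ToString(),
        NoResult = sb.Length == 0
    };
}
```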
1 change: 1 addition & 0 deletions src/MaIN.Services/Services/LLMService/GroqCloudService.cs
@@ -41,6 +41,7 @@ protected override void ValidateApiKey()
public override async Task<ChatResult?> AskMemory(
Chat chat,
ChatMemoryOptions memoryOptions,
ChatRequestOptions requestOptions,
CancellationToken cancellationToken = default)
{
var lastMsg = chat.Messages.Last();
64 changes: 60 additions & 4 deletions src/MaIN.Services/Services/LLMService/LLMService.cs
@@ -58,7 +58,7 @@ public LLMService(
if (ChatHelper.HasFiles(lastMsg))
{
var memoryOptions = ChatHelper.ExtractMemoryOptions(lastMsg);
return await AskMemory(chat, memoryOptions, cancellationToken);
return await AskMemory(chat, memoryOptions, requestOptions, cancellationToken);
}

var model = KnownModels.GetModel(chat.Model);
@@ -90,6 +90,7 @@ public Task CleanSessionCache(string? id)
public async Task<ChatResult?> AskMemory(
Chat chat,
ChatMemoryOptions memoryOptions,
ChatRequestOptions requestOptions,
CancellationToken cancellationToken = default)
{
var model = KnownModels.GetModel(chat.Model);
@@ -112,9 +113,64 @@ public Task CleanSessionCache(string? id)

await memoryService.ImportDataToMemory((memory.km, memory.generator), memoryOptions, cancellationToken);
var userMessage = chat.Messages.Last();
var result = await memory.km.AskAsync(
userMessage.Content,
cancellationToken: cancellationToken);

MemoryAnswer result;

if (requestOptions.InteractiveUpdates || requestOptions.TokenCallback != null)
{
var responseBuilder = new StringBuilder();

var searchOptions = new SearchOptions
{
Stream = true
};

await foreach (var chunk in memory.km.AskStreamingAsync(
userMessage.Content,
options: searchOptions,
cancellationToken: cancellationToken))
{
if (!string.IsNullOrEmpty(chunk.Result))
{
responseBuilder.Append(chunk.Result);

var tokenValue = new LLMTokenValue
{
Text = chunk.Result,
Type = TokenType.Message
};

if (requestOptions.InteractiveUpdates)
{
await notificationService.DispatchNotification(
NotificationMessageBuilder.CreateChatCompletion(chat.Id, tokenValue, false),
ServiceConstants.Notifications.ReceiveMessageUpdate);
}

requestOptions.TokenCallback?.Invoke(tokenValue);
}
}

result = new MemoryAnswer
{
Question = userMessage.Content,
Result = responseBuilder.ToString(),
NoResult = responseBuilder.Length == 0
};
}
else
{
var searchOptions = new SearchOptions
{
Stream = false
};

result = await memory.km.AskAsync(
userMessage.Content,
options: searchOptions,
cancellationToken: cancellationToken);
}

await memory.km.DeleteIndexAsync(cancellationToken: cancellationToken);

if (disableCache)
60 changes: 58 additions & 2 deletions src/MaIN.Services/Services/LLMService/OpenAiCompatibleService.cs
@@ -68,7 +68,7 @@ public abstract class OpenAiCompatibleService(
if (HasFiles(lastMessage))
{
var result = ChatHelper.ExtractMemoryOptions(lastMessage);
var memoryResult = await AskMemory(chat, result, cancellationToken);
var memoryResult = await AskMemory(chat, result, options, cancellationToken);
resultBuilder.Append(memoryResult!.Message.Content);
lastMessage.MarkProcessed();
UpdateSessionCache(chat.Id, resultBuilder.ToString(), options.CreateSession);
@@ -438,6 +438,7 @@ await _notificationService.DispatchNotification(
public virtual async Task<ChatResult?> AskMemory(
Chat chat,
ChatMemoryOptions memoryOptions,
ChatRequestOptions requestOptions,
CancellationToken cancellationToken = default)
{
if (!chat.Messages.Any())
@@ -455,8 +456,63 @@ await _notificationService.DispatchNotification(
userQuery = $"{userQuery} | Respond only using the following JSON format: \n{jsonGrammar}\n. Do not add explanations, code tags, or any extra content.";
}

var retrievedContext = await kernel.AskAsync(userQuery, cancellationToken: cancellationToken);
MemoryAnswer retrievedContext;

if (requestOptions.InteractiveUpdates || requestOptions.TokenCallback != null)
{
var responseBuilder = new StringBuilder();

var searchOptions = new SearchOptions
{
Stream = true
};

await foreach (var chunk in kernel.AskStreamingAsync(
userQuery,
options: searchOptions,
cancellationToken: cancellationToken))
{
if (!string.IsNullOrEmpty(chunk.Result))
{
responseBuilder.Append(chunk.Result);

var tokenValue = new LLMTokenValue
{
Text = chunk.Result,
Type = TokenType.Message
};

if (requestOptions.InteractiveUpdates)
{
await notificationService.DispatchNotification(
NotificationMessageBuilder.CreateChatCompletion(chat.Id, tokenValue, false),
ServiceConstants.Notifications.ReceiveMessageUpdate);
}

requestOptions.TokenCallback?.Invoke(tokenValue);
}
}

retrievedContext = new MemoryAnswer
{
Question = userQuery,
Result = responseBuilder.ToString(),
NoResult = responseBuilder.Length == 0
};
}
else
{
var searchOptions = new SearchOptions
{
Stream = false
};

retrievedContext = await kernel.AskAsync(
userQuery,
options: searchOptions,
cancellationToken: cancellationToken);
}

await kernel.DeleteIndexAsync(cancellationToken: cancellationToken);
return CreateChatResult(chat, retrievedContext.Result, []);
}
1 change: 1 addition & 0 deletions src/MaIN.Services/Services/LLMService/XaiService.cs
@@ -41,6 +41,7 @@ protected override void ValidateApiKey()
public override async Task<ChatResult?> AskMemory(
Chat chat,
ChatMemoryOptions memoryOptions,
ChatRequestOptions requestOptions,
CancellationToken cancellationToken = default)
{
var lastMsg = chat.Messages.Last();
@@ -36,7 +36,7 @@ public class AnswerCommandHandler(
{
case KnowledgeUsage.UseMemory:
result = await llmService.AskMemory(command.Chat,
new ChatMemoryOptions { Memory = command.Chat.Memory });
new ChatMemoryOptions { Memory = command.Chat.Memory }, new ChatRequestOptions());
return result!.Message;
case KnowledgeUsage.UseKnowledge:
var isKnowledgeNeeded = await ShouldUseKnowledge(command.Knowledge, command.Chat);
@@ -138,7 +138,7 @@ await notificationService.DispatchNotification(NotificationMessageBuilder.Create
return result.Message;
}

var knowledgeResult = await llmService.AskMemory(chat, memoryOptions);
var knowledgeResult = await llmService.AskMemory(chat, memoryOptions, new ChatRequestOptions());
chat.Messages.Last().Content = originalContent;
return knowledgeResult?.Message;
}
@@ -94,7 +94,7 @@ private async Task<Message> HandleFileSource(FetchCommand command, Dictionary<st
{
FilesData = filesDictionary,
PreProcess = fileData.PreProcess
}
}, new ChatRequestOptions()
);

return result!.Message;
@@ -112,7 +112,7 @@ private async Task<Message> HandleWebSource(FetchCommand command, Dictionary<str
{
var memoryChat = command.MemoryChat;
var result = await llmServiceFactory.CreateService(command.Chat.Backend ?? settings.BackendType)
.AskMemory(memoryChat!, new ChatMemoryOptions { WebUrls = [webData!.Url] });
.AskMemory(memoryChat!, new ChatMemoryOptions { WebUrls = [webData!.Url] }, new ChatRequestOptions());
result!.Message.Role = command.ResponseType == FetchResponseType.AS_System ? "System" : "Assistant";
return result!.Message;
}
Expand All @@ -131,7 +131,7 @@ private async Task<Message> ProcessJsonResponse(Message response, FetchCommand c
var result = await llmServiceFactory.CreateService(command.Chat.Backend ?? settings.BackendType).AskMemory(command.MemoryChat!, new ChatMemoryOptions
{
TextData = chunks
});
}, new ChatRequestOptions());

result!.Message.Role = command.ResponseType == FetchResponseType.AS_System ? "System" : "Assistant";
var newMessage = result!.Message;