diff --git a/MaIN.Core.IntegrationTests/BackendParamsTests.cs b/MaIN.Core.E2ETests/BackendParamsTests.cs similarity index 99% rename from MaIN.Core.IntegrationTests/BackendParamsTests.cs rename to MaIN.Core.E2ETests/BackendParamsTests.cs index cf240ff6..01708fa9 100644 --- a/MaIN.Core.IntegrationTests/BackendParamsTests.cs +++ b/MaIN.Core.E2ETests/BackendParamsTests.cs @@ -1,13 +1,14 @@ using MaIN.Core.Hub; using MaIN.Domain.Configuration; -using MaIN.Domain.Entities; using MaIN.Domain.Configuration.BackendInferenceParams; +using MaIN.Domain.Entities; using MaIN.Domain.Exceptions; using MaIN.Domain.Models; using MaIN.Domain.Models.Concrete; -namespace MaIN.Core.IntegrationTests; +namespace MaIN.Core.E2ETests; +[Collection("E2ETests")] public class BackendParamsTests : IntegrationTestBase { private const string TestQuestion = "What is 2+2? Answer with just the number."; diff --git a/MaIN.Core.IntegrationTests/ChatTests.cs b/MaIN.Core.E2ETests/ChatTests.cs similarity index 53% rename from MaIN.Core.IntegrationTests/ChatTests.cs rename to MaIN.Core.E2ETests/ChatTests.cs index 60d67048..5917ffbc 100644 --- a/MaIN.Core.IntegrationTests/ChatTests.cs +++ b/MaIN.Core.E2ETests/ChatTests.cs @@ -1,12 +1,13 @@ using FuzzySharp; +using MaIN.Core.E2ETests.Helpers; using MaIN.Core.Hub; -using MaIN.Core.IntegrationTests.Helpers; using MaIN.Domain.Entities; using MaIN.Domain.Models; using MaIN.Domain.Models.Abstract; -namespace MaIN.Core.IntegrationTests; +namespace MaIN.Core.E2ETests; +[Collection("E2ETests")] public class ChatTests : IntegrationTestBase { public ChatTests() : base() @@ -16,7 +17,7 @@ public ChatTests() : base() [Fact] public async Task Should_AnswerQuestion_BasicChat() { - var context = AIHub.Chat().WithModel(Models.Local.Gemma2_2b); + var context = AIHub.Chat().WithModel(Models.Local.Qwen2_5_0_5b); var result = await context .WithMessage("Where the hedgehog goes at night?") @@ -28,28 +29,38 @@ public async Task Should_AnswerQuestion_BasicChat() } [Fact] - public 
async Task Should_AnswerDifferences_BetweenDocuments_ChatWithFiles() + public async Task Should_AnswerFileSubject_ChatWithFiles() { - List files = ["./Files/Nicolaus_Copernicus.pdf", "./Files/Galileo_Galilei.pdf"]; + List files = ["./Files/Nicolaus_Copernicus.pdf"]; var result = await AIHub.Chat() - .WithModel(Models.Local.Gemma2_2b) - .WithMessage("You have 2 documents in memory. Whats the difference of work between Galileo and Copernicus?. Give answer based on the documents.") + .WithModel(Models.Local.Qwen2_5_0_5b) + .WithMessage("Who is described in the file? Reply with ONLY their full name. No explanation, no punctuation. Example: Isaak Newton") + .WithMemoryParams(new MemoryParams { AnswerTokens = 10 }) .WithFiles(files) .CompleteAsync(); Assert.True(result.Done); Assert.NotNull(result.Message); Assert.NotEmpty(result.Message.Content); + var ratio = Fuzz.PartialRatio("nicolaus copernicus", result.Message.Content.ToLowerInvariant()); + Assert.True(ratio > 50, + $""" + Fuzzy match failed! + Expected > 50, but got {ratio}. + Expected: 'nicolaus copernicus' + Actual: '{result.Message.Content}' + """); } [Fact] public async Task Should_AnswerQuestion_FromExistingChat() { var result = AIHub.Chat() - .WithModel(Models.Local.Gemma2_2b); + .WithModel(Models.Local.Qwen2_5_0_5b); await result.WithMessage("What do you think about math theories?") + .WithMemoryParams(new MemoryParams { AnswerTokens = 10 }) .CompleteAsync(); await result.WithMessage("And about physics?") @@ -62,29 +73,53 @@ await result.WithMessage("And about physics?") } [Fact] - public async Task Should_AnswerGameFromImage_ChatWithVision() + public async Task Should_AnswerGameFromImage_ChatWithImagesWithText() { List images = ["./Files/gamex.jpg"]; + var expectedAnswer = "call of duty"; var result = await AIHub.Chat() .WithModel(Models.Local.Llama3_2_3b) - .WithMessage("What is the title of the game? 
Answer only this question.") - .WithMemoryParams(new MemoryParams - { - AnswerTokens = 1000 - }) + .WithMessage("What is the title of the game? Answer in 3 words.") + .WithMemoryParams(new MemoryParams { AnswerTokens = 10 }) + .WithFiles(images) + .CompleteAsync(); + + Assert.True(result.Done); + Assert.NotNull(result.Message); + Assert.NotEmpty(result.Message.Content); + var ratio = Fuzz.PartialRatio(expectedAnswer, result.Message.Content.ToLowerInvariant()); + Assert.True(ratio > 50, + $""" + Fuzzy match failed! + Expected > 50, but got {ratio}. + Expexted: '{expectedAnswer}' + Actual: '{result.Message.Content}' + """); + } + + [Fact] + public async Task Should_AnswerAppleFromImage_ChatWithImagesWithVision() + { + List images = ["./Files/apple.jpg"]; + var expectedAnswer = "apple"; + + var result = await AIHub.Chat() + .WithModel(Models.Local.Gemma3_4b) + .WithMessage("What is this fruit? Answer in one word.") + .WithMemoryParams(new MemoryParams { AnswerTokens = 10 }) .WithFiles(images) .CompleteAsync(); Assert.True(result.Done); Assert.NotNull(result.Message); Assert.NotEmpty(result.Message.Content); - var ratio = Fuzz.PartialRatio("call of duty", result.Message.Content.ToLowerInvariant()); + var ratio = Fuzz.PartialRatio(expectedAnswer, result.Message.Content.ToLowerInvariant()); Assert.True(ratio > 50, $""" Fuzzy match failed! Expected > 50, but got {ratio}. 
- Expexted: 'call of duty' + Expexted: '{expectedAnswer}' Actual: '{result.Message.Content}' """); } @@ -113,9 +148,9 @@ public async Task Should_GenerateImage_BasedOnPrompt() } [Fact] - public async Task Should_AnswerDifferences_BetweenDocuments_ChatWithFiles_UsingStreams() + public async Task Should_AnswerFileSubject_ChatWithFiles_UsingStreams() { - List files = ["./Files/Nicolaus_Copernicus.pdf", "./Files/Galileo_Galilei.pdf"]; + List files = ["./Files/Nicolaus_Copernicus.pdf"]; var fileStreams = new List(); @@ -135,14 +170,25 @@ public async Task Should_AnswerDifferences_BetweenDocuments_ChatWithFiles_UsingS fileStreams.Add(fs); } + var expectedAnswer = "nicolaus copernicus"; + var result = await AIHub.Chat() - .WithModel(Models.Local.Gemma2_2b) - .WithMessage("You have 2 documents in memory. Whats the difference of work between Galileo and Copernicus?. Give answer based on the documents.") + .WithModel(Models.Local.Qwen2_5_0_5b) + .WithMessage("Who is described in the file? Reply with ONLY their full name. No explanation, no punctuation. Example: Isaak Newton") + .WithMemoryParams(new MemoryParams { AnswerTokens = 10 }) .WithFiles(fileStreams) .CompleteAsync(); Assert.True(result.Done); Assert.NotNull(result.Message); Assert.NotEmpty(result.Message.Content); + var ratio = Fuzz.PartialRatio(expectedAnswer, result.Message.Content.ToLowerInvariant()); + Assert.True(ratio > 50, + $""" + Fuzzy match failed! + Expected > 50, but got {ratio}. 
+ Expected: '{expectedAnswer}' + Actual: '{result.Message.Content}' + """); } } diff --git a/MaIN.Core.IntegrationTests/Files/Books.json b/MaIN.Core.E2ETests/Files/Books.json similarity index 100% rename from MaIN.Core.IntegrationTests/Files/Books.json rename to MaIN.Core.E2ETests/Files/Books.json diff --git a/MaIN.Core.IntegrationTests/Files/Galileo_Galilei.pdf b/MaIN.Core.E2ETests/Files/Galileo_Galilei.pdf similarity index 100% rename from MaIN.Core.IntegrationTests/Files/Galileo_Galilei.pdf rename to MaIN.Core.E2ETests/Files/Galileo_Galilei.pdf diff --git a/MaIN.Core.IntegrationTests/Files/Nicolaus_Copernicus.pdf b/MaIN.Core.E2ETests/Files/Nicolaus_Copernicus.pdf similarity index 100% rename from MaIN.Core.IntegrationTests/Files/Nicolaus_Copernicus.pdf rename to MaIN.Core.E2ETests/Files/Nicolaus_Copernicus.pdf diff --git a/MaIN.Core.E2ETests/Files/apple.jpg b/MaIN.Core.E2ETests/Files/apple.jpg new file mode 100644 index 00000000..a416e602 Binary files /dev/null and b/MaIN.Core.E2ETests/Files/apple.jpg differ diff --git a/MaIN.Core.IntegrationTests/Files/gamex.jpg b/MaIN.Core.E2ETests/Files/gamex.jpg similarity index 100% rename from MaIN.Core.IntegrationTests/Files/gamex.jpg rename to MaIN.Core.E2ETests/Files/gamex.jpg diff --git a/MaIN.Core.IntegrationTests/Helpers/NetworkHelper.cs b/MaIN.Core.E2ETests/Helpers/NetworkHelper.cs similarity index 90% rename from MaIN.Core.IntegrationTests/Helpers/NetworkHelper.cs rename to MaIN.Core.E2ETests/Helpers/NetworkHelper.cs index 1482d4e4..7c9f22fe 100644 --- a/MaIN.Core.IntegrationTests/Helpers/NetworkHelper.cs +++ b/MaIN.Core.E2ETests/Helpers/NetworkHelper.cs @@ -1,7 +1,6 @@ -using System; using System.Net.Sockets; -namespace MaIN.Core.IntegrationTests.Helpers; +namespace MaIN.Core.E2ETests.Helpers; public static class NetworkHelper { diff --git a/MaIN.Core.E2ETests/IntegrationTestBase.cs b/MaIN.Core.E2ETests/IntegrationTestBase.cs new file mode 100644 index 00000000..7efff4f8 --- /dev/null +++ 
b/MaIN.Core.E2ETests/IntegrationTestBase.cs @@ -0,0 +1,40 @@ +using Microsoft.AspNetCore.Hosting; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +namespace MaIN.Core.E2ETests; + +public class IntegrationTestBase : IDisposable +{ + protected readonly IHost _host; + protected readonly IServiceProvider _services; + + protected IntegrationTestBase() + { + _host = Host.CreateDefaultBuilder() + .ConfigureServices((context, services) => + { + services.AddMaIN(context.Configuration); + ConfigureServices(services); + }) + .Build(); + + _host.Services.UseMaIN(); + _host.Start(); + + _services = _host.Services; + } + + // Allow derived classes to add additional services or override existing ones + protected virtual void ConfigureServices(IServiceCollection services) + { + } + + protected T GetService() where T : notnull => _services.GetRequiredService(); + + public void Dispose() + { + _host.Dispose(); + GC.SuppressFinalize(this); + } +} diff --git a/MaIN.Core.E2ETests/MaIN.Core.E2ETests.csproj b/MaIN.Core.E2ETests/MaIN.Core.E2ETests.csproj new file mode 100644 index 00000000..5d44937e --- /dev/null +++ b/MaIN.Core.E2ETests/MaIN.Core.E2ETests.csproj @@ -0,0 +1,47 @@ + + + + net8.0;net10.0 + enable + enable + false + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + + + + + + + + Always + + + Always + + + PreserveNewest + + + Always + + + PreserveNewest + + + + diff --git a/MaIN.Core.IntegrationTests/AnthropicServiceTests.cs b/MaIN.Core.IntegrationTests/AnthropicServiceTests.cs new file mode 100644 index 00000000..c00a4639 --- /dev/null +++ b/MaIN.Core.IntegrationTests/AnthropicServiceTests.cs @@ -0,0 +1,169 @@ +using System.Text.Json; +using MaIN.Core.Hub; +using MaIN.Domain.Configuration; +using MaIN.Domain.Configuration.BackendInferenceParams; +using MaIN.Domain.Entities.Tools; +using MaIN.Domain.Exceptions; +using MaIN.Domain.Models.Abstract; +using MaIN.Services.Services.Models; + 
+namespace MaIN.Core.IntegrationTests; + +[Collection("IntegrationTests")] +public class AnthropicServiceTests : LLMServiceTestBase +{ + private const string ModelId = "claude-sonnet-4-5"; + + public AnthropicServiceTests() + { + ModelRegistry.RegisterOrReplace(new GenericCloudModel(ModelId, BackendType.Anthropic)); + HttpHandler.ResponseBody = AnthropicResponse("ok"); + } + + [Fact] + public async Task Should_SetMaxTokens_DefaultTo4096_WhenNotSpecified() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .CompleteAsync(); + + JsonElement root = HttpHandler.LastRequestJson!.RootElement; + Assert.Equal(4096, root.GetProperty("max_tokens").GetInt32()); + } + + [Fact] + public async Task Should_MapMaxTokens_FromAnthropicInferenceParams() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .WithInferenceParams(new AnthropicInferenceParams { MaxTokens = 2048 }) + .CompleteAsync(); + + JsonElement root = HttpHandler.LastRequestJson!.RootElement; + Assert.Equal(2048, root.GetProperty("max_tokens").GetInt32()); + } + + [Fact] + public async Task Should_ExtractSystemPrompt_ToTopLevelField() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hello") + .WithSystemPrompt("Be helpful") + .CompleteAsync(); + + JsonElement root = HttpHandler.LastRequestJson!.RootElement; + Assert.True(root.TryGetProperty("system", out JsonElement systemProp)); + Assert.Equal("Be helpful", systemProp.GetString()); + } + + [Fact] + public async Task Should_SendImages_AsBase64_WithMediaType() + { + const string visionModelId = "claude-sonnet-4-5-vision"; + ModelRegistry.RegisterOrReplace(new GenericCloudVisionModel(visionModelId, BackendType.Anthropic)); + + var imageBytes = new byte[] { 0xFF, 0xD8, 0xFF, 0xE0 }; // JPEG magic bytes + + await AIHub.Chat() + .WithModel(visionModelId) + .WithMessage("describe this image", imageBytes) + .CompleteAsync(); + + JsonElement root = HttpHandler.LastRequestJson!.RootElement; + JsonElement messages = 
root.GetProperty("messages"); + JsonElement userMessage = messages.EnumerateArray() + .FirstOrDefault(m => m.GetProperty("role").GetString() == "user"); + + Assert.NotEqual(default, userMessage); + JsonElement content = userMessage.GetProperty("content"); + Assert.Equal(JsonValueKind.Array, content.ValueKind); + JsonElement imagePart = content.EnumerateArray() + .FirstOrDefault(p => p.GetProperty("type").GetString() == "image"); + Assert.NotEqual(default, imagePart); + Assert.Equal("base64", imagePart.GetProperty("source").GetProperty("type").GetString()); + } + + [Fact] + public async Task Should_IncludeXApiKeyHeader() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .CompleteAsync(); + + Assert.True(HttpHandler.LastRequest!.Headers.Contains("x-api-key")); + } + + [Fact] + public async Task Should_IncludeAnthropicVersionHeader() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .CompleteAsync(); + + Assert.True(HttpHandler.LastRequest!.Headers.Contains("anthropic-version")); + } + + [Fact] + public async Task Should_ParseContent_FromNonStreamingResponse() + { + HttpHandler.ResponseBody = AnthropicResponse("hello"); + + ChatResult result = await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .CompleteAsync(); + + Assert.Equal("hello", result.Message.Content); + } + + [Fact] + public async Task Should_UseInputSchema_NotParameters_ForTools() + { + var tools = new ToolsConfiguration + { + Tools = + [ + new ToolDefinition + { + Type = "function", + Function = new FunctionDefinition + { + Name = "get_weather", + Description = "Get current weather", + Parameters = new { type = "object", properties = new { } } + }, + Execute = _ => Task.FromResult("sunny") + } + ] + }; + + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("what's the weather?") + .WithTools(tools) + .CompleteAsync(); + + JsonElement root = HttpHandler.LastRequestJson!.RootElement; + JsonElement toolsArray = root.GetProperty("tools"); + JsonElement 
tool = toolsArray[0]; + + Assert.True(tool.TryGetProperty("input_schema", out _)); + Assert.False(tool.TryGetProperty("parameters", out _)); + } + + [Fact] + public async Task Should_ThrowInvalidBackendParamsException_WhenWrongParams() + { + await Assert.ThrowsAsync(() => + AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .WithInferenceParams(new OpenAiInferenceParams()) + .CompleteAsync()); + } +} diff --git a/MaIN.Core.IntegrationTests/ChatPipelineTests.cs b/MaIN.Core.IntegrationTests/ChatPipelineTests.cs new file mode 100644 index 00000000..13f099b6 --- /dev/null +++ b/MaIN.Core.IntegrationTests/ChatPipelineTests.cs @@ -0,0 +1,194 @@ +using MaIN.Core.Hub; +using MaIN.Domain.Configuration; +using MaIN.Domain.Configuration.BackendInferenceParams; +using MaIN.Domain.Entities; +using MaIN.Domain.Exceptions.Chats; +using MaIN.Domain.Models.Abstract; +using MaIN.Services.Services.Models; + +namespace MaIN.Core.IntegrationTests; + +[Collection("IntegrationTests")] +public class ChatPipelineTests : PipelineTestBase +{ + private const string TestModelId = "pipeline-test-model"; + + public ChatPipelineTests() + { + ModelRegistry.RegisterOrReplace(new GenericCloudModel(TestModelId, BackendType.OpenAi)); + SetTextResponse("default response"); + } + + [Fact] + public async Task Should_ReturnDone_OnSimpleCompletion() + { + var result = await AIHub.Chat() + .WithModel(TestModelId) + .WithMessage("Hello") + .CompleteAsync(); + + Assert.True(result.Done); + } + + [Fact] + public async Task Should_ReturnConfiguredContent_WhenHandlerSet() + { + SetTextResponse("custom content"); + + var result = await AIHub.Chat() + .WithModel(TestModelId) + .WithMessage("Hello") + .CompleteAsync(); + + Assert.Equal("custom content", result.Message.Content); + } + + [Fact] + public async Task Should_PersistAssistantMessage_AfterCompletion() + { + SetTextResponse("assistant reply"); + + var context = AIHub.Chat().WithModel(TestModelId); + await context + .WithMessage("Hello") + 
.CompleteAsync(); + + var chatId = context.GetChatId(); + var existing = await AIHub.Chat().FromExisting(chatId); + var history = existing.GetChatHistory(); + + Assert.Equal(2, history.Count); + } + + [Fact] + public async Task Should_AccumulateMessages_AcrossMultipleTurns() + { + SetTextResponse("reply"); + + var context = AIHub.Chat().WithModel(TestModelId); + + await context.WithMessage("Turn 1").CompleteAsync(); + await context.WithMessage("Turn 2").CompleteAsync(); + + var history = context.GetChatHistory(); + Assert.Equal(4, history.Count); + } + + [Fact] + public async Task Should_SendUserMessageToHandler_WithCorrectRole() + { + Chat? captured = null; + FakeFactory.Service.Handler = chat => + { + captured = chat; + return new ChatResult + { + Model = chat.ModelId, + Done = true, + CreatedAt = DateTime.UtcNow, + Message = new Message { Role = "assistant", Content = "ok", Type = MessageType.CloudLLM } + }; + }; + + await AIHub.Chat() + .WithModel(TestModelId) + .WithMessage("Hello from user") + .CompleteAsync(); + + Assert.NotNull(captured); + Assert.Contains(captured!.Messages, m => m.Role == "User"); + } + + [Fact] + public async Task Should_ApplySystemPrompt_AsFirstMessage() + { + Chat? 
captured = null; + FakeFactory.Service.Handler = chat => + { + captured = chat; + return new ChatResult + { + Model = chat.ModelId, + Done = true, + CreatedAt = DateTime.UtcNow, + Message = new Message { Role = "assistant", Content = "ok", Type = MessageType.CloudLLM } + }; + }; + + await AIHub.Chat() + .WithModel(TestModelId) + .WithMessage("User message") + .WithSystemPrompt("Be concise") + .CompleteAsync(); + + Assert.NotNull(captured); + Assert.Equal("System", captured!.Messages[0].Role); + Assert.Equal("Be concise", captured!.Messages[0].Content); + } + + [Fact] + public async Task Should_ThrowEmptyChatException_WhenNoMessageAdded() + { + var context = AIHub.Chat() + .WithModel(TestModelId) + .WithMessages([]); + + await Assert.ThrowsAsync(() => context.CompleteAsync()); + } + + [Fact] + public async Task Should_UseLastModel_WhenSetTwice() + { + const string secondModel = "pipeline-test-model-2"; + ModelRegistry.RegisterOrReplace(new GenericCloudModel(secondModel, BackendType.OpenAi)); + + Chat? captured = null; + FakeFactory.Service.Handler = chat => + { + captured = chat; + return new ChatResult + { + Model = chat.ModelId, + Done = true, + CreatedAt = DateTime.UtcNow, + Message = new Message { Role = "assistant", Content = "ok", Type = MessageType.CloudLLM } + }; + }; + + var entry = AIHub.Chat(); + entry.WithModel(TestModelId); + await entry + .WithModel(secondModel) + .WithMessage("Hello") + .CompleteAsync(); + + Assert.Equal(secondModel, captured!.ModelId); + } + + [Fact] + public async Task Should_SetBackendParams_WhenInferenceParamsProvided() + { + Chat? 
captured = null; + FakeFactory.Service.Handler = chat => + { + captured = chat; + return new ChatResult + { + Model = chat.ModelId, + Done = true, + CreatedAt = DateTime.UtcNow, + Message = new Message { Role = "assistant", Content = "ok", Type = MessageType.CloudLLM } + }; + }; + + await AIHub.Chat() + .WithModel(TestModelId) + .WithMessage("Hello") + .WithInferenceParams(new OpenAiInferenceParams { Temperature = 0.42f }) + .CompleteAsync(); + + Assert.NotNull(captured); + var openAiParams = Assert.IsType(captured!.BackendParams); + Assert.Equal(0.42f, openAiParams.Temperature); + } +} diff --git a/MaIN.Core.IntegrationTests/Fakes/FakeHttpClientFactory.cs b/MaIN.Core.IntegrationTests/Fakes/FakeHttpClientFactory.cs new file mode 100644 index 00000000..60156a9e --- /dev/null +++ b/MaIN.Core.IntegrationTests/Fakes/FakeHttpClientFactory.cs @@ -0,0 +1,8 @@ +namespace MaIN.Core.IntegrationTests.Fakes; + +public sealed class FakeHttpClientFactory : IHttpClientFactory +{ + public FakeHttpMessageHandler Handler { get; } = new(); + + public HttpClient CreateClient(string name) => new(Handler, false); +} diff --git a/MaIN.Core.IntegrationTests/Fakes/FakeHttpMessageHandler.cs b/MaIN.Core.IntegrationTests/Fakes/FakeHttpMessageHandler.cs new file mode 100644 index 00000000..70620ffd --- /dev/null +++ b/MaIN.Core.IntegrationTests/Fakes/FakeHttpMessageHandler.cs @@ -0,0 +1,33 @@ +using System.Net; +using System.Text; +using System.Text.Json; + +namespace MaIN.Core.IntegrationTests.Fakes; + +public sealed class FakeHttpMessageHandler : HttpMessageHandler +{ + public HttpRequestMessage? LastRequest { get; private set; } + public string? LastRequestBody { get; private set; } + public JsonDocument? 
LastRequestJson { get; private set; } + public HttpStatusCode ResponseStatusCode { get; set; } = HttpStatusCode.OK; + public string ResponseBody { get; set; } = string.Empty; + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken ct) + { + LastRequest = request; + if (request.Content is not null) + { + LastRequestBody = await request.Content.ReadAsStringAsync(ct); + try + { + LastRequestJson = JsonDocument.Parse(LastRequestBody); + } + catch { } + } + + return new HttpResponseMessage(ResponseStatusCode) + { + Content = new StringContent(ResponseBody, Encoding.UTF8, "application/json") + }; + } +} diff --git a/MaIN.Core.IntegrationTests/Fakes/FakeLLMService.cs b/MaIN.Core.IntegrationTests/Fakes/FakeLLMService.cs new file mode 100644 index 00000000..4d8bdb10 --- /dev/null +++ b/MaIN.Core.IntegrationTests/Fakes/FakeLLMService.cs @@ -0,0 +1,25 @@ +using MaIN.Domain.Entities; +using MaIN.Services.Services.Abstract; +using MaIN.Services.Services.LLMService; +using MaIN.Services.Services.Models; + +namespace MaIN.Core.IntegrationTests.Fakes; + +public sealed class FakeLLMService : ILLMService +{ + public Func? 
Handler { get; set; } + + public Task Send(Chat chat, ChatRequestOptions options, CancellationToken ct = default) + => Task.FromResult(Handler?.Invoke(chat)); + + public Task AskMemory( + Chat chat, + ChatMemoryOptions memOpts, + ChatRequestOptions reqOpts, + CancellationToken ct = default) + => Task.FromResult(Handler?.Invoke(chat)); + + public Task GetCurrentModels() => Task.FromResult(Array.Empty()); + + public Task CleanSessionCache(string id) => Task.CompletedTask; +} diff --git a/MaIN.Core.IntegrationTests/Fakes/FakeLLMServiceFactory.cs b/MaIN.Core.IntegrationTests/Fakes/FakeLLMServiceFactory.cs new file mode 100644 index 00000000..74693674 --- /dev/null +++ b/MaIN.Core.IntegrationTests/Fakes/FakeLLMServiceFactory.cs @@ -0,0 +1,12 @@ +using MaIN.Domain.Configuration; +using MaIN.Services.Services.Abstract; +using MaIN.Services.Services.LLMService.Factory; + +namespace MaIN.Core.IntegrationTests.Fakes; + +public sealed class FakeLLMServiceFactory : ILLMServiceFactory +{ + public FakeLLMService Service { get; } = new(); + + public ILLMService CreateService(BackendType backendType) => Service; +} diff --git a/MaIN.Core.IntegrationTests/IntegrationTestBase.cs b/MaIN.Core.IntegrationTests/IntegrationTestBase.cs index db53171a..ac948253 100644 --- a/MaIN.Core.IntegrationTests/IntegrationTestBase.cs +++ b/MaIN.Core.IntegrationTests/IntegrationTestBase.cs @@ -1,5 +1,3 @@ -using System.Net.Sockets; -using Microsoft.AspNetCore.Hosting; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Hosting; @@ -40,4 +38,4 @@ public void Dispose() { _host?.Dispose(); } -} \ No newline at end of file +} diff --git a/MaIN.Core.IntegrationTests/LLMServiceTestBase.cs b/MaIN.Core.IntegrationTests/LLMServiceTestBase.cs new file mode 100644 index 00000000..d4e7852d --- /dev/null +++ b/MaIN.Core.IntegrationTests/LLMServiceTestBase.cs @@ -0,0 +1,68 @@ +using MaIN.Core.IntegrationTests.Fakes; +using MaIN.Domain.Configuration; +using 
Microsoft.Extensions.DependencyInjection; + +namespace MaIN.Core.IntegrationTests; + +public class LLMServiceTestBase : IntegrationTestBase +{ + protected readonly FakeHttpClientFactory FakeClientFactory = new(); + protected FakeHttpMessageHandler HttpHandler => FakeClientFactory.Handler; + + protected override void ConfigureServices(IServiceCollection services) + { + services.AddSingleton(FakeClientFactory); + services.AddSingleton(new MaINSettings + { + OpenAiKey = "test-openai-key", + AnthropicKey = "test-anthropic-key", + GeminiKey = "test-gemini-key", + DeepSeekKey = "test-deepseek-key", + GroqCloudKey = "test-groq-key", + XaiKey = "test-xai-key", + }); + } + + protected static string OpenAiResponse(string content, string model = "gpt-4o-mini") => + $$""" + { + "choices": [ + { + "message": { + "role": "assistant", + "content": "{{content}}" + } + } + ], + "model": "{{model}}" + } + """; + + protected static string AnthropicResponse(string content) => + $$""" + { + "content": [ + { + "type": "text", + "text": "{{content}}" + } + ], + "model": "claude-sonnet-4-5", + "id": "msg_test" + } + """; + protected static string OpenAiStreamResponse(string content) => + $$$""" + data: {"choices":[{"delta":{"content":"{{{content}}}"}}]} + data: [DONE] + + """; + protected static string AnthropicStreamResponse(string content) => + $$$""" + event: content_block_delta + data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"{{{content}}}"}} + event: message_stop + data: {"type":"message_stop"} + + """; +} diff --git a/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj b/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj index 460c35cf..a7196baf 100644 --- a/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj +++ b/MaIN.Core.IntegrationTests/MaIN.Core.IntegrationTests.csproj @@ -1,44 +1,23 @@ - - - - net8.0;net10.0 - enable - enable - false - - - - - - - - all - runtime; build; native; contentfiles; analyzers; 
buildtransitive - - - - - - - - - - - - - - - Always - - - PreserveNewest - - - Always - - - PreserveNewest - - - + + + net8.0;net10.0 + enable + enable + false + true + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + diff --git a/MaIN.Core.IntegrationTests/OpenAiServiceTests.cs b/MaIN.Core.IntegrationTests/OpenAiServiceTests.cs new file mode 100644 index 00000000..60f94fa0 --- /dev/null +++ b/MaIN.Core.IntegrationTests/OpenAiServiceTests.cs @@ -0,0 +1,210 @@ +using System.Text.Json; +using MaIN.Core.Hub; +using MaIN.Domain.Configuration; +using MaIN.Domain.Configuration.BackendInferenceParams; +using MaIN.Domain.Entities.Tools; +using MaIN.Domain.Exceptions; +using MaIN.Domain.Models.Abstract; + +namespace MaIN.Core.IntegrationTests; + +[Collection("IntegrationTests")] +public class OpenAiServiceTests : LLMServiceTestBase +{ + private const string ModelId = "gpt-4o-mini"; + + public OpenAiServiceTests() + { + ModelRegistry.RegisterOrReplace(new GenericCloudModel(ModelId, BackendType.OpenAi)); + HttpHandler.ResponseBody = OpenAiResponse("ok"); + } + + [Fact] + public async Task Should_SendModelId_InRequestBody() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .CompleteAsync(); + + var root = HttpHandler.LastRequestJson!.RootElement; + Assert.Equal(ModelId, root.GetProperty("model").GetString()); + } + + [Fact] + public async Task Should_SendUserMessage_InMessagesArray() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hello world") + .CompleteAsync(); + + var root = HttpHandler.LastRequestJson!.RootElement; + var messages = root.GetProperty("messages"); + var userMessage = messages.EnumerateArray() + .FirstOrDefault(m => m.GetProperty("role").GetString() == "user"); + + Assert.NotEqual(default, userMessage); + Assert.Equal("hello world", userMessage.GetProperty("content").GetString()); + } + + [Fact] + public async Task Should_SendStreamFalse_ForNonStreaming() + { + await 
AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .CompleteAsync(); + + var root = HttpHandler.LastRequestJson!.RootElement; + Assert.False(root.GetProperty("stream").GetBoolean()); + } + + [Fact] + public async Task Should_SendStreamTrue_ForStreaming() + { + HttpHandler.ResponseBody = OpenAiStreamResponse("hello"); + + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .CompleteAsync(interactive: true); + + var root = HttpHandler.LastRequestJson!.RootElement; + Assert.True(root.GetProperty("stream").GetBoolean()); + } + + [Fact] + public async Task Should_MapTemperature_FromOpenAiInferenceParams() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .WithInferenceParams(new OpenAiInferenceParams { Temperature = 0.7f }) + .CompleteAsync(); + + var root = HttpHandler.LastRequestJson!.RootElement; + Assert.Equal(0.7f, root.GetProperty("temperature").GetSingle()); + } + + [Fact] + public async Task Should_MapMaxTokens_FromOpenAiInferenceParams() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .WithInferenceParams(new OpenAiInferenceParams { MaxTokens = 512 }) + .CompleteAsync(); + + var root = HttpHandler.LastRequestJson!.RootElement; + Assert.Equal(512, root.GetProperty("max_tokens").GetInt32()); + } + + [Fact] + public async Task Should_MapTopP_FromOpenAiInferenceParams() + { + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .WithInferenceParams(new OpenAiInferenceParams { TopP = 0.9f }) + .CompleteAsync(); + + var root = HttpHandler.LastRequestJson!.RootElement; + Assert.Equal(0.9f, root.GetProperty("top_p").GetSingle()); + } + + [Fact] + public async Task Should_ParseContent_FromNonStreamingResponse() + { + HttpHandler.ResponseBody = OpenAiResponse("hello"); + + var result = await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .CompleteAsync(); + + Assert.Equal("hello", result.Message.Content); + } + + [Fact] + public async Task Should_SendAuthorizationHeader_WithBearerToken() + 
{ + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .CompleteAsync(); + + Assert.NotNull(HttpHandler.LastRequest!.Headers.Authorization); + Assert.Equal("Bearer", HttpHandler.LastRequest!.Headers.Authorization!.Scheme); + } + + [Fact] + public async Task Should_IncludeVisionContent_WhenModelIsVision() + { + const string visionModelId = "gpt-4o-vision"; + ModelRegistry.RegisterOrReplace(new GenericCloudVisionModel(visionModelId, BackendType.OpenAi)); + + var imageBytes = new byte[] { 0xFF, 0xD8, 0xFF, 0xE0 }; // JPEG magic bytes + + await AIHub.Chat() + .WithModel(visionModelId) + .WithMessage("describe this image", imageBytes) + .CompleteAsync(); + + var root = HttpHandler.LastRequestJson!.RootElement; + var messages = root.GetProperty("messages"); + var userMessage = messages.EnumerateArray() + .FirstOrDefault(m => m.GetProperty("role").GetString() == "user"); + + Assert.NotEqual(default, userMessage); + var content = userMessage.GetProperty("content"); + Assert.Equal(JsonValueKind.Array, content.ValueKind); + Assert.Contains(content.EnumerateArray() +, part => part.GetProperty("type").GetString() == "image_url"); + } + + [Fact] + public async Task Should_IncludeToolsArray_WhenToolsConfigured() + { + var tools = new ToolsConfiguration + { + Tools = + [ + new ToolDefinition + { + Type = "function", + Function = new FunctionDefinition + { + Name = "get_weather", + Description = "Get current weather", + Parameters = new { type = "object", properties = new { } } + }, + Execute = _ => Task.FromResult("sunny") + } + ] + }; + + await AIHub.Chat() + .WithModel(ModelId) + .WithMessage("what's the weather?") + .WithTools(tools) + .CompleteAsync(); + + var root = HttpHandler.LastRequestJson!.RootElement; + var toolsArray = root.GetProperty("tools"); + Assert.Equal(JsonValueKind.Array, toolsArray.ValueKind); + Assert.Equal("get_weather", + toolsArray[0].GetProperty("function").GetProperty("name").GetString()); + } + + [Fact] + public async Task 
Should_ThrowInvalidBackendParamsException_WhenWrongParams() + { + await Assert.ThrowsAsync<InvalidBackendParamsException>(() => + AIHub.Chat() + .WithModel(ModelId) + .WithMessage("hi") + .WithInferenceParams(new AnthropicInferenceParams()) + .CompleteAsync()); + } +} diff --git a/MaIN.Core.IntegrationTests/PipelineTestBase.cs b/MaIN.Core.IntegrationTests/PipelineTestBase.cs new file mode 100644 index 00000000..f41010dd --- /dev/null +++ b/MaIN.Core.IntegrationTests/PipelineTestBase.cs @@ -0,0 +1,24 @@ +using MaIN.Core.IntegrationTests.Fakes; +using MaIN.Domain.Entities; +using MaIN.Services.Services.LLMService.Factory; +using MaIN.Services.Services.Models; +using Microsoft.Extensions.DependencyInjection; + +namespace MaIN.Core.IntegrationTests; + +public class PipelineTestBase : IntegrationTestBase +{ + protected readonly FakeLLMServiceFactory FakeFactory = new(); + + protected override void ConfigureServices(IServiceCollection services) + => services.AddSingleton(FakeFactory); + + protected void SetTextResponse(string content) => + FakeFactory.Service.Handler = chat => new ChatResult + { + Model = chat.ModelId ?? 
"fake", + Done = true, + CreatedAt = DateTime.UtcNow, + Message = new Message { Role = "assistant", Content = content, Type = MessageType.CloudLLM } + }; +} diff --git a/MaIN.sln b/MaIN.sln index 4417e84c..752ecbea 100644 --- a/MaIN.sln +++ b/MaIN.sln @@ -23,7 +23,9 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Examples.SimpleConsole", "E EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MaIN.InferPage", "src\MaIN.InferPage\MaIN.InferPage.csproj", "{B691188A-1170-489D-8729-A13108C12C57}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MaIN.Core.IntegrationTests", "MaIN.Core.IntegrationTests\MaIN.Core.IntegrationTests.csproj", "{2C15062A-E9F6-47FC-A4CD-1190A49E3FE3}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MaIN.Core.IntegrationTests", "MaIN.Core.IntegrationTests\MaIN.Core.IntegrationTests.csproj", "{C3D4E5F6-A7B8-9012-C3D4-E5F6A7B89012}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MaIN.Core.E2ETests", "MaIN.Core.E2ETests\MaIN.Core.E2ETests.csproj", "{2C15062A-E9F6-47FC-A4CD-1190A49E3FE3}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -63,6 +65,10 @@ Global {B691188A-1170-489D-8729-A13108C12C57}.Debug|Any CPU.Build.0 = Debug|Any CPU {B691188A-1170-489D-8729-A13108C12C57}.Release|Any CPU.ActiveCfg = Release|Any CPU {B691188A-1170-489D-8729-A13108C12C57}.Release|Any CPU.Build.0 = Release|Any CPU + {C3D4E5F6-A7B8-9012-C3D4-E5F6A7B89012}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C3D4E5F6-A7B8-9012-C3D4-E5F6A7B89012}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C3D4E5F6-A7B8-9012-C3D4-E5F6A7B89012}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C3D4E5F6-A7B8-9012-C3D4-E5F6A7B89012}.Release|Any CPU.Build.0 = Release|Any CPU {2C15062A-E9F6-47FC-A4CD-1190A49E3FE3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {2C15062A-E9F6-47FC-A4CD-1190A49E3FE3}.Debug|Any CPU.Build.0 = Debug|Any CPU {2C15062A-E9F6-47FC-A4CD-1190A49E3FE3}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ 
-75,6 +81,7 @@ Global {781BDD20-65BA-4C5D-815B-D8A15931570A} = {28851935-517F-438D-BF7C-02FEB1A37A68} {46E6416B-1736-478C-B697-B37BB8E6A23E} = {53D24B04-279D-4D18-8829-EA0F57AE69F3} {75DEBB8A-75CD-44BA-9369-3916950428EF} = {28851935-517F-438D-BF7C-02FEB1A37A68} + {C3D4E5F6-A7B8-9012-C3D4-E5F6A7B89012} = {53D24B04-279D-4D18-8829-EA0F57AE69F3} {2C15062A-E9F6-47FC-A4CD-1190A49E3FE3} = {53D24B04-279D-4D18-8829-EA0F57AE69F3} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution diff --git a/src/MaIN.Domain/Models/Concrete/LocalModels.cs b/src/MaIN.Domain/Models/Concrete/LocalModels.cs index 7132e3b5..cf4e3a64 100644 --- a/src/MaIN.Domain/Models/Concrete/LocalModels.cs +++ b/src/MaIN.Domain/Models/Concrete/LocalModels.cs @@ -20,6 +20,7 @@ public sealed record Gemma3_4b() : LocalModel( 8192, "Balanced 4B model for writing, analysis, and mathematical reasoning"), IVisionModel { + // https://huggingface.co/ggml-org/gemma-3-4b-it-GGUF/blob/main/mmproj-model-f16.gguf public string MMProjectName => "mmproj-model-gemma3-4b.gguf"; } diff --git a/src/MaIN.Services/Services/LLMService/LLMService.cs b/src/MaIN.Services/Services/LLMService/LLMService.cs index 791df6b2..de7a9bd3 100644 --- a/src/MaIN.Services/Services/LLMService/LLMService.cs +++ b/src/MaIN.Services/Services/LLMService/LLMService.cs @@ -67,8 +67,12 @@ public LLMService( } var lastMsg = chat.Messages.Last(); + var model = GetLocalModel(chat); - await ChatHelper.ExtractImageFromFiles(lastMsg); + if (model is IVisionModel) + { + await ChatHelper.ExtractImageFromFiles(lastMsg); + } if (ChatHelper.HasFiles(lastMsg)) { @@ -80,8 +84,6 @@ public LLMService( { return await ProcessWithToolsAsync(chat, requestOptions, cancellationToken); } - - var model = GetLocalModel(chat); var tokens = await ProcessChatRequest(chat, model, lastMsg, requestOptions, cancellationToken); lastMsg.MarkProcessed(); return await CreateChatResult(chat, tokens, requestOptions);