Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Examples/Examples/Chat/ChatCustomGrammarExample.cs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ public async Task Start()
await AIHub.Chat()
.WithModel(Models.Local.Gemma2_2b)
.WithMessage("Generate random person")
.WithInferenceParams(new InferenceParams
.WithInferenceParams(new LocalInferenceParams
{
Grammar = personGrammar
})
Expand Down
9 changes: 9 additions & 0 deletions Examples/Examples/Chat/ChatExampleOpenAi.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using Examples.Utils;
using MaIN.Core.Hub;
using MaIN.Domain.Configuration.BackendInferenceParams;
using MaIN.Domain.Models;

namespace Examples.Chat;
Expand All @@ -15,6 +16,14 @@ public async Task Start()
await AIHub.Chat()
.WithModel(Models.OpenAi.Gpt5Nano)
.WithMessage("What do you consider to be the greatest invention in history?")
.WithInferenceParams(new OpenAiInferenceParams // We could override some inference params
{
ResponseFormat = "text",
AdditionalParams = new Dictionary<string, object>
{
["max_completion_tokens"] = 2137
}
})
.CompleteAsync(interactive: true);
}
}
2 changes: 1 addition & 1 deletion Examples/Examples/Chat/ChatGrammarExampleGemini.cs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ public async Task Start()
await AIHub.Chat()
.WithModel(Models.Gemini.Gemini2_5Flash)
.WithMessage("Generate random person")
.WithInferenceParams(new InferenceParams
.WithInferenceParams(new LocalInferenceParams
{
Grammar = new Grammar(grammarValue, GrammarFormat.JSONSchema)
})
Expand Down
320 changes: 320 additions & 0 deletions MaIN.Core.IntegrationTests/BackendParamsTests.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,320 @@
using MaIN.Core.Hub;
using MaIN.Domain.Configuration;
using MaIN.Domain.Entities;
using MaIN.Domain.Configuration.BackendInferenceParams;
using MaIN.Domain.Exceptions;
using MaIN.Domain.Models;
using MaIN.Domain.Models.Concrete;

namespace MaIN.Core.IntegrationTests;

/// <summary>
/// Integration tests verifying that each backend accepts its own typed inference-params
/// class, and that passing a mismatched params type raises
/// <see cref="InvalidBackendParamsException"/>.
/// Cloud-backend tests skip themselves when the provider's API-key environment
/// variable is not set; local tests skip when the model file / Ollama daemon is absent.
/// </summary>
public class BackendParamsTests : IntegrationTestBase
{
    private const string TestQuestion = "What is 2+2? Answer with just the number.";

    // Location of the local GGUF model required by the self-hosted backend test.
    // NOTE(review): Windows-specific path — confirm this matches the CI machine layout.
    private const string LocalModelPath = "C:/Models/gemma2-2b.gguf";

    [SkippableFact]
    public async Task OpenAi_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.OpenAi)?.ApiKeyEnvName!);

        var result = await AIHub.Chat()
            .WithModel(Models.OpenAi.Gpt4oMini)
            .WithMessage(TestQuestion)
            .WithInferenceParams(new OpenAiInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task Anthropic_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.Anthropic)?.ApiKeyEnvName!);

        var result = await AIHub.Chat()
            .WithModel(Models.Anthropic.ClaudeSonnet4)
            .WithMessage(TestQuestion)
            .WithInferenceParams(new AnthropicInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task Gemini_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.Gemini)?.ApiKeyEnvName!);

        var result = await AIHub.Chat()
            .WithModel(Models.Gemini.Gemini2_0Flash)
            .WithMessage(TestQuestion)
            .WithInferenceParams(new GeminiInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task DeepSeek_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.DeepSeek)?.ApiKeyEnvName!);

        var result = await AIHub.Chat()
            .WithModel(Models.DeepSeek.Reasoner)
            .WithMessage(TestQuestion)
            .WithInferenceParams(new DeepSeekInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task GroqCloud_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.GroqCloud)?.ApiKeyEnvName!);

        var result = await AIHub.Chat()
            .WithModel(Models.Groq.Llama3_1_8bInstant)
            .WithMessage(TestQuestion)
            .WithInferenceParams(new GroqCloudInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task Xai_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.Xai)?.ApiKeyEnvName!);

        var result = await AIHub.Chat()
            .WithModel(Models.Xai.Grok3Beta)
            .WithMessage(TestQuestion)
            .WithInferenceParams(new XaiInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task Self_Should_RespondWithParams()
    {
        // Self-hosted backend: requires the GGUF model on disk, no API key.
        Skip.If(!File.Exists(LocalModelPath), $"Local model not found at {LocalModelPath}");

        var result = await AIHub.Chat()
            .WithModel(Models.Local.Gemma2_2b)
            .WithMessage(TestQuestion)
            .WithInferenceParams(new LocalInferenceParams
            {
                Temperature = 0.3f,
                ContextSize = 8192,
                MaxTokens = 100,
                TopK = 40,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task LocalOllama_Should_RespondWithParams()
    {
        SkipIfOllamaNotRunning();

        var result = await AIHub.Chat()
            .WithModel(Models.Ollama.Gemma3_4b)
            .WithMessage(TestQuestion)
            .WithInferenceParams(new OllamaInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopK = 40,
                TopP = 0.9f,
                NumCtx = 2048
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task CloudOllama_Should_RespondWithParams()
    {
        // Renamed from "ClaudOllama" (typo): this is the API-key-gated (cloud)
        // Ollama variant, counterpart of LocalOllama above.
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.Ollama)?.ApiKeyEnvName!);

        var result = await AIHub.Chat()
            .WithModel(Models.Ollama.Gemma3_4b)
            .WithMessage(TestQuestion)
            .WithInferenceParams(new OllamaInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopK = 40,
                TopP = 0.9f,
                NumCtx = 2048
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    // --- Params mismatch validation (no API key required) ---

    [Fact]
    public async Task Self_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel(Models.Local.Gemma2_2b)
                .WithMessage(TestQuestion)
                .WithInferenceParams(new OpenAiInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task OpenAi_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel(Models.OpenAi.Gpt4oMini)
                .WithMessage(TestQuestion)
                .WithInferenceParams(new DeepSeekInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task Anthropic_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel(Models.Anthropic.ClaudeSonnet4)
                .WithMessage(TestQuestion)
                .WithInferenceParams(new OpenAiInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task Gemini_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel(Models.Gemini.Gemini2_0Flash)
                .WithMessage(TestQuestion)
                .WithInferenceParams(new AnthropicInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task DeepSeek_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel(Models.DeepSeek.Reasoner)
                .WithMessage(TestQuestion)
                .WithInferenceParams(new GeminiInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task GroqCloud_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel(Models.Groq.Llama3_1_8bInstant)
                .WithMessage(TestQuestion)
                .WithInferenceParams(new OpenAiInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task Xai_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel(Models.Xai.Grok3Beta)
                .WithMessage(TestQuestion)
                .WithInferenceParams(new AnthropicInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task Ollama_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel(Models.Ollama.Gemma3_4b)
                .WithMessage(TestQuestion)
                .WithInferenceParams(new DeepSeekInferenceParams())
                .CompleteAsync());
    }

    /// <summary>
    /// Skips the current test when the given environment variable is unset or empty.
    /// Note: callers pass the registry's <c>ApiKeyEnvName</c> with the null-forgiving
    /// operator; a null name falls through <see cref="string.IsNullOrEmpty(string)"/>
    /// and also results in a skip, which is the desired behavior.
    /// </summary>
    private static void SkipIfMissingKey(string envName)
    {
        Skip.If(string.IsNullOrEmpty(Environment.GetEnvironmentVariable(envName)),
            $"{envName} environment variable not set");
    }

    /// <summary>
    /// Skips the current test when no Ollama daemon answers on localhost:11434
    /// (3-second ping timeout).
    /// </summary>
    private static void SkipIfOllamaNotRunning()
    {
        Skip.If(!Helpers.NetworkHelper.PingHost("127.0.0.1", 11434, 3),
            "Ollama is not running on localhost:11434");
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Xunit.SkippableFact" Version="1.5.61" />
</ItemGroup>

<ItemGroup>
Expand Down
3 changes: 3 additions & 0 deletions Releases/0.10.2.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# 0.10.2 release

Inference parameters are now backend-specific — each AI provider has its own typed params class where only explicitly set values are sent to the API, with an AdditionalParams dictionary for custom fields.
4 changes: 2 additions & 2 deletions src/MaIN.Core.UnitTests/AgentContextTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ public async Task CreateAsync_ShouldCallAgentServiceCreateAgent()
It.IsAny<Agent>(),
It.IsAny<bool>(),
It.IsAny<bool>(),
It.IsAny<InferenceParams>(),
It.IsAny<IBackendInferenceParams>(),
It.IsAny<MemoryParams>(),
It.IsAny<bool>()))
.ReturnsAsync(agent);
Expand All @@ -153,7 +153,7 @@ public async Task CreateAsync_ShouldCallAgentServiceCreateAgent()
It.IsAny<Agent>(),
It.Is<bool>(f => f == true),
It.Is<bool>(r => r == false),
It.IsAny<InferenceParams>(),
It.IsAny<IBackendInferenceParams>(),
It.IsAny<MemoryParams>(),
It.IsAny<bool>()),
Times.Once);
Expand Down
Loading
Loading