From 7504dc2947065ded46689f8081289a741b76536d Mon Sep 17 00:00:00 2001
From: amankalra172
Date: Sun, 8 Feb 2026 12:35:51 +0100
Subject: [PATCH] feat: add STACKIT provider with 8 AI models

Add STACKIT as a new provider with complete model specifications.

Chat models:
- Llama 3.1 8B Instruct FP8
- Llama 3.3 70B Instruct FP8
- GPT-OSS 120B
- Mistral Nemo Instruct 2407 FP8
- Gemma 3 27B (multimodal)
- Qwen3-VL 235B (vision-language)

Embedding models:
- E5 Mistral 7B
- Qwen3-VL Embedding 8B (multimodal)

All models include:
- Schema-compliant fields (attachment, reasoning, tool_call, etc.)
- Pricing in USD per million tokens
- Context limits and modalities

Also adds the official STACKIT logo with currentColor support.

STACKIT is a German sovereign cloud provider offering OpenAI-compatible
AI model serving with open-source models.
---
 providers/stackit/logo.svg                            |  4 ++++
 providers/stackit/models/e5-mistral-7b.toml           | 21 +++++++++++++++++++
 providers/stackit/models/gemma-3-27b.toml             | 21 +++++++++++++++++++
 providers/stackit/models/gpt-oss-120b.toml            | 21 +++++++++++++++++++
 providers/stackit/models/llama-3.1-8b.toml            | 21 +++++++++++++++++++
 providers/stackit/models/llama-3.3-70b.toml           | 21 +++++++++++++++++++
 providers/stackit/models/mistral-nemo.toml            | 21 +++++++++++++++++++
 providers/stackit/models/qwen3-vl-235b.toml           | 21 +++++++++++++++++++
 .../stackit/models/qwen3-vl-embedding-8b.toml         | 21 +++++++++++++++++++
 providers/stackit/provider.toml                       |  5 +++++
 10 files changed, 177 insertions(+)
 create mode 100644 providers/stackit/logo.svg
 create mode 100644 providers/stackit/models/e5-mistral-7b.toml
 create mode 100644 providers/stackit/models/gemma-3-27b.toml
 create mode 100644 providers/stackit/models/gpt-oss-120b.toml
 create mode 100644 providers/stackit/models/llama-3.1-8b.toml
 create mode 100644 providers/stackit/models/llama-3.3-70b.toml
 create mode 100644 providers/stackit/models/mistral-nemo.toml
 create mode 100644 providers/stackit/models/qwen3-vl-235b.toml
 create mode 100644 providers/stackit/models/qwen3-vl-embedding-8b.toml
 create mode 100644 providers/stackit/provider.toml

diff --git a/providers/stackit/logo.svg b/providers/stackit/logo.svg
new file mode 100644
index 000000000..0d78b781a
--- /dev/null
+++ b/providers/stackit/logo.svg
@@ -0,0 +1,4 @@
+
+
+
+
diff --git a/providers/stackit/models/e5-mistral-7b.toml b/providers/stackit/models/e5-mistral-7b.toml
new file mode 100644
index 000000000..efdd3534f
--- /dev/null
+++ b/providers/stackit/models/e5-mistral-7b.toml
@@ -0,0 +1,21 @@
+name = "E5 Mistral 7B"
+release_date = "2023-12-11"
+last_updated = "2023-12-11"
+attachment = false
+reasoning = false
+temperature = false
+tool_call = false
+structured_output = false
+open_weights = true
+
+[cost]
+input = 0.02
+output = 0.02
+
+[limit]
+context = 4_096
+output = 4_096
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/stackit/models/gemma-3-27b.toml b/providers/stackit/models/gemma-3-27b.toml
new file mode 100644
index 000000000..28d206ac1
--- /dev/null
+++ b/providers/stackit/models/gemma-3-27b.toml
@@ -0,0 +1,21 @@
+name = "Gemma 3 27B"
+release_date = "2025-05-17"
+last_updated = "2025-05-17"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = false
+structured_output = false
+open_weights = true
+
+[cost]
+input = 0.53
+output = 0.77
+
+[limit]
+context = 37_000
+output = 8_192
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/stackit/models/gpt-oss-120b.toml b/providers/stackit/models/gpt-oss-120b.toml
new file mode 100644
index 000000000..64bf3e8e1
--- /dev/null
+++ b/providers/stackit/models/gpt-oss-120b.toml
@@ -0,0 +1,21 @@
+name = "GPT-OSS 120B"
+release_date = "2025-08-05"
+last_updated = "2025-08-05"
+attachment = false
+reasoning = true
+temperature = true
+tool_call = true
+structured_output = false
+open_weights = true
+
+[cost]
+input = 0.53
+output = 0.77
+
+[limit]
+context = 128_000
+output = 8_192
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/stackit/models/llama-3.1-8b.toml b/providers/stackit/models/llama-3.1-8b.toml
new file mode 100644
index 000000000..aed8a66f4
--- /dev/null
+++ b/providers/stackit/models/llama-3.1-8b.toml
@@ -0,0 +1,21 @@
+name = "Llama 3.1 8B"
+release_date = "2024-07-23"
+last_updated = "2024-07-23"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+structured_output = true
+open_weights = true
+
+[cost]
+input = 0.18
+output = 0.30
+
+[limit]
+context = 128_000
+output = 8_192
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/stackit/models/llama-3.3-70b.toml b/providers/stackit/models/llama-3.3-70b.toml
new file mode 100644
index 000000000..a4f70d8b2
--- /dev/null
+++ b/providers/stackit/models/llama-3.3-70b.toml
@@ -0,0 +1,21 @@
+name = "Llama 3.3 70B"
+release_date = "2024-12-05"
+last_updated = "2024-12-05"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+structured_output = false
+open_weights = true
+
+[cost]
+input = 0.53
+output = 0.77
+
+[limit]
+context = 128_000
+output = 8_192
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/stackit/models/mistral-nemo.toml b/providers/stackit/models/mistral-nemo.toml
new file mode 100644
index 000000000..92c60e517
--- /dev/null
+++ b/providers/stackit/models/mistral-nemo.toml
@@ -0,0 +1,21 @@
+name = "Mistral Nemo"
+release_date = "2024-07-01"
+last_updated = "2024-07-01"
+attachment = false
+reasoning = false
+temperature = true
+tool_call = true
+structured_output = false
+open_weights = true
+
+[cost]
+input = 0.53
+output = 0.77
+
+[limit]
+context = 128_000
+output = 8_192
+
+[modalities]
+input = ["text"]
+output = ["text"]
diff --git a/providers/stackit/models/qwen3-vl-235b.toml b/providers/stackit/models/qwen3-vl-235b.toml
new file mode 100644
index 000000000..6d5e26ca6
--- /dev/null
+++ b/providers/stackit/models/qwen3-vl-235b.toml
@@ -0,0 +1,21 @@
+name = "Qwen3-VL 235B"
+release_date = "2024-11-01"
+last_updated = "2024-11-01"
+attachment = true
+reasoning = false
+temperature = true
+tool_call = true
+structured_output = false
+open_weights = true
+
+[cost]
+input = 1.77
+output = 2.07
+
+[limit]
+context = 218_000
+output = 8_192
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/stackit/models/qwen3-vl-embedding-8b.toml b/providers/stackit/models/qwen3-vl-embedding-8b.toml
new file mode 100644
index 000000000..ef4c3df08
--- /dev/null
+++ b/providers/stackit/models/qwen3-vl-embedding-8b.toml
@@ -0,0 +1,21 @@
+name = "Qwen3-VL Embedding 8B"
+release_date = "2026-02-05"
+last_updated = "2026-02-05"
+attachment = true
+reasoning = false
+temperature = false
+tool_call = false
+structured_output = false
+open_weights = true
+
+[cost]
+input = 0.09
+output = 0.09
+
+[limit]
+context = 32_000
+output = 4_096
+
+[modalities]
+input = ["text", "image"]
+output = ["text"]
diff --git a/providers/stackit/provider.toml b/providers/stackit/provider.toml
new file mode 100644
index 000000000..9ab06dd8e
--- /dev/null
+++ b/providers/stackit/provider.toml
@@ -0,0 +1,5 @@
+name = "STACKIT"
+env = ["STACKIT_API_KEY"]
+npm = "@ai-sdk/openai-compatible"
+doc = "https://docs.stackit.cloud/products/data-and-ai/ai-model-serving/basics/available-shared-models"
+api = "https://api.openai-compat.model-serving.eu01.onstackit.cloud/v1"
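
Usage note (not part of the patch): a minimal sketch of how the values in
provider.toml would typically be wired into the OpenAI-compatible provider
named there, assuming the Vercel AI SDK packages "ai" and
"@ai-sdk/openai-compatible" are installed. The model ID passed to the provider
is a placeholder; the actual IDs served by STACKIT are listed at the doc URL
in provider.toml.

    // stackit-example.ts — hedged sketch, not a verified STACKIT integration.
    import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
    import { generateText } from "ai";

    // Values taken from provider.toml above:
    //   env = ["STACKIT_API_KEY"]
    //   api = "https://api.openai-compat.model-serving.eu01.onstackit.cloud/v1"
    const stackit = createOpenAICompatible({
      name: "stackit",
      baseURL: "https://api.openai-compat.model-serving.eu01.onstackit.cloud/v1",
      apiKey: process.env.STACKIT_API_KEY,
    });

    const { text } = await generateText({
      // "llama-3.3-70b" is a placeholder model ID for illustration only.
      model: stackit("llama-3.3-70b"),
      prompt: "Summarize what a sovereign cloud provider is in one sentence.",
    });

    console.log(text);

Because the endpoint is OpenAI-compatible, the same base URL and API key
should also work with any plain OpenAI-style client; the snippet uses the npm
package referenced in provider.toml only because that is what the config
declares.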