diff --git a/providers/t-systems/logo.svg b/providers/t-systems/logo.svg new file mode 100644 index 000000000..31cafd280 --- /dev/null +++ b/providers/t-systems/logo.svg @@ -0,0 +1,2 @@ + + diff --git a/providers/t-systems/models/claude-3-7-sonnet.toml b/providers/t-systems/models/claude-3-7-sonnet.toml new file mode 100644 index 000000000..6451be5d5 --- /dev/null +++ b/providers/t-systems/models/claude-3-7-sonnet.toml @@ -0,0 +1,7 @@ +[extends] +from = "anthropic/claude-3-7-sonnet-20250219" +omit = ["cost.cache_read", "cost.cache_write"] + +[cost] +input = 3.31 +output = 16.55 diff --git a/providers/t-systems/models/claude-sonnet-4.5.toml b/providers/t-systems/models/claude-sonnet-4.5.toml new file mode 100644 index 000000000..1e14df8be --- /dev/null +++ b/providers/t-systems/models/claude-sonnet-4.5.toml @@ -0,0 +1,9 @@ +name = "Claude Sonnet 4.5" + +[extends] +from = "anthropic/claude-sonnet-4-5" +omit = ["cost.cache_read", "cost.cache_write"] + +[cost] +input = 7.28 +output = 27.38 diff --git a/providers/t-systems/models/claude-sonnet-4.toml b/providers/t-systems/models/claude-sonnet-4.toml new file mode 100644 index 000000000..21ee51e44 --- /dev/null +++ b/providers/t-systems/models/claude-sonnet-4.toml @@ -0,0 +1,9 @@ +name = "Claude Sonnet 4" + +[extends] +from = "anthropic/claude-sonnet-4-0" +omit = ["cost.cache_read", "cost.cache_write"] + +[cost] +input = 3.31 +output = 16.55 diff --git a/providers/t-systems/models/gemini-2.5-flash.toml b/providers/t-systems/models/gemini-2.5-flash.toml new file mode 100644 index 000000000..1394c7f1d --- /dev/null +++ b/providers/t-systems/models/gemini-2.5-flash.toml @@ -0,0 +1,10 @@ +release_date = "2025-06-17" +last_updated = "2025-06-17" + +[extends] +from = "google/gemini-2.5-flash" +omit = ["cost.cache_read", "cost.input_audio", "structured_output"] + +[cost] +input = 1.38 +output = 11.03 diff --git a/providers/t-systems/models/gemini-2.5-pro.toml b/providers/t-systems/models/gemini-2.5-pro.toml new file mode 100644 
index 000000000..71225cf53 --- /dev/null +++ b/providers/t-systems/models/gemini-2.5-pro.toml @@ -0,0 +1,7 @@ +[extends] +from = "google/gemini-2.5-pro" +omit = ["cost.cache_read", "cost.context_over_200k", "structured_output"] + +[cost] +input = 2.76 +output = 16.55 diff --git a/providers/t-systems/models/gemini-3-pro.toml b/providers/t-systems/models/gemini-3-pro.toml new file mode 100644 index 000000000..451d580e7 --- /dev/null +++ b/providers/t-systems/models/gemini-3-pro.toml @@ -0,0 +1,13 @@ +name = "Gemini 3 Pro" + +[extends] +from = "google/gemini-3-pro-preview" +omit = ["cost.cache_read", "cost.context_over_200k"] + +[cost] +input = 4.41 +output = 19.86 + +[limit] +context = 1_048_576 +output = 65_536 diff --git a/providers/t-systems/models/gpt-4.1-mini.toml b/providers/t-systems/models/gpt-4.1-mini.toml new file mode 100644 index 000000000..102c7798c --- /dev/null +++ b/providers/t-systems/models/gpt-4.1-mini.toml @@ -0,0 +1,13 @@ +name = "GPT-4.1 Mini" + +[extends] +from = "openai/gpt-4.1-mini" +omit = ["cost.cache_read"] + +[cost] +input = 0.44 +output = 1.73 + +[modalities] +input = ["text", "image"] +output = ["text"] diff --git a/providers/t-systems/models/gpt-4.1-nano.toml b/providers/t-systems/models/gpt-4.1-nano.toml new file mode 100644 index 000000000..4f8ebfc94 --- /dev/null +++ b/providers/t-systems/models/gpt-4.1-nano.toml @@ -0,0 +1,9 @@ +name = "GPT-4.1 Nano" + +[extends] +from = "openai/gpt-4.1-nano" +omit = ["cost.cache_read"] + +[cost] +input = 0.11 +output = 0.43 diff --git a/providers/t-systems/models/gpt-4.1.toml b/providers/t-systems/models/gpt-4.1.toml new file mode 100644 index 000000000..63da2e80f --- /dev/null +++ b/providers/t-systems/models/gpt-4.1.toml @@ -0,0 +1,7 @@ +[extends] +from = "openai/gpt-4.1" +omit = ["cost.cache_read"] + +[cost] +input = 2.17 +output = 8.66 diff --git a/providers/t-systems/models/gpt-4o-mini.toml b/providers/t-systems/models/gpt-4o-mini.toml new file mode 100644 index 000000000..996b5a423 --- 
/dev/null +++ b/providers/t-systems/models/gpt-4o-mini.toml @@ -0,0 +1,9 @@ +name = "GPT-4o Mini" + +[extends] +from = "openai/gpt-4o-mini" +omit = ["cost.cache_read"] + +[cost] +input = 0.19 +output = 0.78 diff --git a/providers/t-systems/models/gpt-4o.toml b/providers/t-systems/models/gpt-4o.toml new file mode 100644 index 000000000..367c1e28f --- /dev/null +++ b/providers/t-systems/models/gpt-4o.toml @@ -0,0 +1,7 @@ +[extends] +from = "openai/gpt-4o" +omit = ["cost.cache_read"] + +[cost] +input = 3.25 +output = 12.99 diff --git a/providers/t-systems/models/gpt-5-codex.toml b/providers/t-systems/models/gpt-5-codex.toml new file mode 100644 index 000000000..8387b398e --- /dev/null +++ b/providers/t-systems/models/gpt-5-codex.toml @@ -0,0 +1,9 @@ +name = "GPT-5 Codex" + +[extends] +from = "openai/gpt-5-codex" +omit = ["cost.cache_read"] + +[cost] +input = 1.35 +output = 10.70 diff --git a/providers/t-systems/models/gpt-5-mini.toml b/providers/t-systems/models/gpt-5-mini.toml new file mode 100644 index 000000000..21ecff2e9 --- /dev/null +++ b/providers/t-systems/models/gpt-5-mini.toml @@ -0,0 +1,7 @@ +[extends] +from = "openai/gpt-5-mini" +omit = ["cost.cache_read"] + +[cost] +input = 0.30 +output = 2.44 diff --git a/providers/t-systems/models/gpt-5-nano.toml b/providers/t-systems/models/gpt-5-nano.toml new file mode 100644 index 000000000..184440093 --- /dev/null +++ b/providers/t-systems/models/gpt-5-nano.toml @@ -0,0 +1,7 @@ +[extends] +from = "openai/gpt-5-nano" +omit = ["cost.cache_read"] + +[cost] +input = 0.06 +output = 0.49 diff --git a/providers/t-systems/models/gpt-5.toml b/providers/t-systems/models/gpt-5.toml new file mode 100644 index 000000000..5ce8919a9 --- /dev/null +++ b/providers/t-systems/models/gpt-5.toml @@ -0,0 +1,9 @@ +temperature = true + +[extends] +from = "openai/gpt-5" +omit = ["cost.cache_read"] + +[cost] +input = 1.52 +output = 12.18 diff --git a/providers/t-systems/models/gpt-image-1.toml b/providers/t-systems/models/gpt-image-1.toml 
new file mode 100644 index 000000000..f00babe80 --- /dev/null +++ b/providers/t-systems/models/gpt-image-1.toml @@ -0,0 +1,22 @@ +name = "GPT Image 1" +family = "gpt-image" +release_date = "2025-12-16" +last_updated = "2025-12-16" +attachment = true +reasoning = false +temperature = false +tool_call = false +open_weights = false + +[cost] +input = 12.72 +output = 50.85 + +[limit] +context = 0 +input = 0 +output = 0 + +[modalities] +input = ["text", "image"] +output = ["text", "image"] diff --git a/providers/t-systems/models/gpt-oss-120b.toml b/providers/t-systems/models/gpt-oss-120b.toml new file mode 100644 index 000000000..c10f5f16f --- /dev/null +++ b/providers/t-systems/models/gpt-oss-120b.toml @@ -0,0 +1,21 @@ +name = "GPT-OSS 120B" +family = "gpt-oss" +release_date = "2025-08-05" +last_updated = "2025-08-05" +attachment = false +reasoning = true +temperature = true +tool_call = true +open_weights = true + +[cost] +input = 3.53 +output = 3.53 + +[limit] +context = 131_072 +output = 26_215 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/t-systems/models/jina-embeddings-v2-base-code.toml b/providers/t-systems/models/jina-embeddings-v2-base-code.toml new file mode 100644 index 000000000..528bbcec4 --- /dev/null +++ b/providers/t-systems/models/jina-embeddings-v2-base-code.toml @@ -0,0 +1,21 @@ +name = "Jina Embeddings v2 Base Code" +family = "text-embedding" +release_date = "2024-01-01" +last_updated = "2024-01-01" +attachment = false +reasoning = false +temperature = false +tool_call = false +open_weights = true + +[cost] +input = 0.48 +output = 0.48 + +[limit] +context = 8_192 +output = 0 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/t-systems/models/jina-embeddings-v2-base-de.toml b/providers/t-systems/models/jina-embeddings-v2-base-de.toml new file mode 100644 index 000000000..addab06fe --- /dev/null +++ b/providers/t-systems/models/jina-embeddings-v2-base-de.toml @@ -0,0 +1,21 @@ +name = "Jina 
Embeddings v2 Base DE" +family = "text-embedding" +release_date = "2024-01-01" +last_updated = "2024-01-01" +attachment = false +reasoning = false +temperature = false +tool_call = false +open_weights = true + +[cost] +input = 0.48 +output = 0.48 + +[limit] +context = 8_192 +output = 0 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/t-systems/models/llama-3.3-70b-instruct.toml b/providers/t-systems/models/llama-3.3-70b-instruct.toml new file mode 100644 index 000000000..247b79413 --- /dev/null +++ b/providers/t-systems/models/llama-3.3-70b-instruct.toml @@ -0,0 +1,13 @@ +name = "Llama 3.3 70B Instruct" +attachment = false + +[extends] +from = "llama/llama-3.3-70b-instruct" + +[cost] +input = 3.53 +output = 3.53 + +[limit] +context = 131_072 +output = 32_768 diff --git a/providers/t-systems/models/mistral-large-2411.toml b/providers/t-systems/models/mistral-large-2411.toml new file mode 100644 index 000000000..aa3fe86c9 --- /dev/null +++ b/providers/t-systems/models/mistral-large-2411.toml @@ -0,0 +1,6 @@ +[extends] +from = "mistral/mistral-large-2411" + +[cost] +input = 2.86 +output = 6.62 diff --git a/providers/t-systems/models/mistral-medium-3.toml b/providers/t-systems/models/mistral-medium-3.toml new file mode 100644 index 000000000..d57b9444a --- /dev/null +++ b/providers/t-systems/models/mistral-medium-3.toml @@ -0,0 +1,6 @@ +[extends] +from = "mistral/mistral-medium-2505" + +[cost] +input = 0.44 +output = 2.21 diff --git a/providers/t-systems/models/mistral-small-24b-instruct-2501.toml b/providers/t-systems/models/mistral-small-24b-instruct-2501.toml new file mode 100644 index 000000000..8dd2c444b --- /dev/null +++ b/providers/t-systems/models/mistral-small-24b-instruct-2501.toml @@ -0,0 +1,21 @@ +name = "Mistral Small 24B" +family = "mistral-small" +release_date = "2025-01-01" +last_updated = "2025-01-01" +attachment = false +reasoning = false +temperature = true +tool_call = true +open_weights = true + +[cost] +input = 3.53 
+output = 3.53 + +[limit] +context = 131_072 +output = 32_768 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/t-systems/models/o1-mini.toml b/providers/t-systems/models/o1-mini.toml new file mode 100644 index 000000000..32dfb5c3b --- /dev/null +++ b/providers/t-systems/models/o1-mini.toml @@ -0,0 +1,7 @@ +[extends] +from = "openai/o1-mini" +omit = ["cost.cache_read"] + +[cost] +input = 3.90 +output = 15.58 diff --git a/providers/t-systems/models/o1.toml b/providers/t-systems/models/o1.toml new file mode 100644 index 000000000..1cd1738cd --- /dev/null +++ b/providers/t-systems/models/o1.toml @@ -0,0 +1,7 @@ +[extends] +from = "openai/o1" +omit = ["cost.cache_read"] + +[cost] +input = 19.48 +output = 77.92 diff --git a/providers/t-systems/models/o3-mini.toml b/providers/t-systems/models/o3-mini.toml new file mode 100644 index 000000000..6f8c6c1e7 --- /dev/null +++ b/providers/t-systems/models/o3-mini.toml @@ -0,0 +1,7 @@ +[extends] +from = "openai/o3-mini" +omit = ["cost.cache_read"] + +[cost] +input = 1.43 +output = 5.71 diff --git a/providers/t-systems/models/o3.toml b/providers/t-systems/models/o3.toml new file mode 100644 index 000000000..e57f987dc --- /dev/null +++ b/providers/t-systems/models/o3.toml @@ -0,0 +1,7 @@ +[extends] +from = "openai/o3" +omit = ["cost.cache_read"] + +[cost] +input = 2.18 +output = 8.71 diff --git a/providers/t-systems/models/o4-mini.toml b/providers/t-systems/models/o4-mini.toml new file mode 100644 index 000000000..835d27551 --- /dev/null +++ b/providers/t-systems/models/o4-mini.toml @@ -0,0 +1,7 @@ +[extends] +from = "openai/o4-mini" +omit = ["cost.cache_read"] + +[cost] +input = 1.19 +output = 4.76 diff --git a/providers/t-systems/models/qwen2.5-coder-32b-instruct-fp8.toml b/providers/t-systems/models/qwen2.5-coder-32b-instruct-fp8.toml new file mode 100644 index 000000000..368c2d06c --- /dev/null +++ b/providers/t-systems/models/qwen2.5-coder-32b-instruct-fp8.toml @@ -0,0 +1,21 @@ +name = "Qwen 2.5 
Coder 32B" +family = "qwen" +release_date = "2024-11-11" +last_updated = "2024-11-11" +attachment = false +reasoning = false +temperature = true +tool_call = true +open_weights = true + +[cost] +input = 3.53 +output = 3.53 + +[limit] +context = 131_072 +output = 32_768 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/t-systems/models/qwen3-30b-a3b-fp8.toml b/providers/t-systems/models/qwen3-30b-a3b-fp8.toml new file mode 100644 index 000000000..da59b035f --- /dev/null +++ b/providers/t-systems/models/qwen3-30b-a3b-fp8.toml @@ -0,0 +1,21 @@ +name = "Qwen3 30B A3B" +family = "qwen" +release_date = "2025-04-29" +last_updated = "2025-04-29" +attachment = false +reasoning = true +temperature = true +tool_call = true +open_weights = true + +[cost] +input = 3.53 +output = 3.53 + +[limit] +context = 40_960 +output = 40_960 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/t-systems/models/qwen3-next-80b-a3b-instruct-fp8.toml b/providers/t-systems/models/qwen3-next-80b-a3b-instruct-fp8.toml new file mode 100644 index 000000000..bef7904b0 --- /dev/null +++ b/providers/t-systems/models/qwen3-next-80b-a3b-instruct-fp8.toml @@ -0,0 +1,15 @@ +name = "Qwen3 Next 80B Instruct" +release_date = "2025-09-11" +last_updated = "2025-09-11" + +[extends] +from = "alibaba/qwen3-next-80b-a3b-instruct" +omit = ["knowledge"] + +[cost] +input = 3.53 +output = 3.53 + +[limit] +context = 131_072 +output = 52_429 diff --git a/providers/t-systems/models/qwen3-vl-30b-a3b-instruct-fp8.toml b/providers/t-systems/models/qwen3-vl-30b-a3b-instruct-fp8.toml new file mode 100644 index 000000000..2430f373b --- /dev/null +++ b/providers/t-systems/models/qwen3-vl-30b-a3b-instruct-fp8.toml @@ -0,0 +1,13 @@ +name = "Qwen3 VL 30B Instruct" +attachment = true +reasoning = false +release_date = "2025-10-05" +last_updated = "2025-10-05" + +[extends] +from = "alibaba/qwen3-vl-30b-a3b" +omit = ["cost.reasoning", "knowledge"] + +[cost] +input = 3.53 +output = 3.53 diff 
--git a/providers/t-systems/models/teuken-7b-instruct-v04.toml b/providers/t-systems/models/teuken-7b-instruct-v04.toml new file mode 100644 index 000000000..aa91bd47f --- /dev/null +++ b/providers/t-systems/models/teuken-7b-instruct-v04.toml @@ -0,0 +1,20 @@ +name = "Teuken 7B Instruct" +release_date = "2024-10-01" +last_updated = "2024-10-01" +attachment = false +reasoning = false +temperature = true +tool_call = false +open_weights = true + +[cost] +input = 3.53 +output = 3.53 + +[limit] +context = 8_192 +output = 4_096 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/t-systems/models/text-embedding-ada-002.toml b/providers/t-systems/models/text-embedding-ada-002.toml new file mode 100644 index 000000000..54ddd8a50 --- /dev/null +++ b/providers/t-systems/models/text-embedding-ada-002.toml @@ -0,0 +1,13 @@ +name = "Text Embedding Ada 002" + +[extends] +from = "openai/text-embedding-ada-002" +omit = ["knowledge"] + +[cost] +input = 0.11 +output = 0.11 + +[limit] +context = 8192 +output = 0 diff --git a/providers/t-systems/models/text-embedding-bge-m3.toml b/providers/t-systems/models/text-embedding-bge-m3.toml new file mode 100644 index 000000000..118f93bb6 --- /dev/null +++ b/providers/t-systems/models/text-embedding-bge-m3.toml @@ -0,0 +1,21 @@ +name = "BGE M3" +family = "bge" +release_date = "2024-01-30" +last_updated = "2024-01-30" +attachment = false +reasoning = false +temperature = false +tool_call = false +open_weights = true + +[cost] +input = 0.48 +output = 0.48 + +[limit] +context = 8_192 +output = 0 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/t-systems/models/tsi-embedding-colqwen2-2b-v1.toml b/providers/t-systems/models/tsi-embedding-colqwen2-2b-v1.toml new file mode 100644 index 000000000..066fcd4bf --- /dev/null +++ b/providers/t-systems/models/tsi-embedding-colqwen2-2b-v1.toml @@ -0,0 +1,21 @@ +name = "TSI Embedding ColQwen2 2B" +family = "text-embedding" +release_date = "2025-01-01" 
+last_updated = "2025-01-01" +attachment = false +reasoning = false +temperature = false +tool_call = false +open_weights = true + +[cost] +input = 0.00 +output = 0.00 + +[limit] +context = 8_192 +output = 0 + +[modalities] +input = ["text"] +output = ["text"] diff --git a/providers/t-systems/models/whisper-large-v3-turbo.toml b/providers/t-systems/models/whisper-large-v3-turbo.toml new file mode 100644 index 000000000..d454d6921 --- /dev/null +++ b/providers/t-systems/models/whisper-large-v3-turbo.toml @@ -0,0 +1,21 @@ +name = "Whisper Large V3 Turbo" +family = "whisper" +release_date = "2024-10-01" +last_updated = "2024-10-01" +attachment = false +reasoning = false +temperature = true +tool_call = false +open_weights = true + +[cost] +input = 92.11 +output = 92.11 + +[limit] +context = 448 +output = 448 + +[modalities] +input = ["audio"] +output = ["text"] diff --git a/providers/t-systems/models/whisper-large-v3.toml b/providers/t-systems/models/whisper-large-v3.toml new file mode 100644 index 000000000..3b16a07ec --- /dev/null +++ b/providers/t-systems/models/whisper-large-v3.toml @@ -0,0 +1,21 @@ +name = "Whisper Large V3" +family = "whisper" +release_date = "2023-09-01" +last_updated = "2023-09-01" +attachment = false +reasoning = false +temperature = true +tool_call = false +open_weights = true + +[cost] +input = 146.10 +output = 146.10 + +[limit] +context = 448 +output = 448 + +[modalities] +input = ["audio"] +output = ["text"] diff --git a/providers/t-systems/provider.toml b/providers/t-systems/provider.toml new file mode 100644 index 000000000..bdd304616 --- /dev/null +++ b/providers/t-systems/provider.toml @@ -0,0 +1,5 @@ +name = "T-Systems AI Foundation Services" +env = ["T_SYSTEMS_API_KEY"] +npm = "@ai-sdk/openai-compatible" +api = "https://llm-server.llmhub.t-systems.net/v2" +doc = "https://docs.llmhub.t-systems.net/"