From ef58bb8d3cb313de5002d3874f7f863bbe949693 Mon Sep 17 00:00:00 2001 From: finallylly Date: Mon, 30 Mar 2026 11:13:47 +0800 Subject: [PATCH] fix(config): correct MiniMax M2.7 highspeed model name and add thinking support (#1596) * fix(config): correct MiniMax M2.7 highspeed model name and add thinking support - Rename minimax-m2.5-highspeed to minimax-m2.7-highspeed for CN region - Add supports_thinking: true for both M2.7 and M2.7-highspeed models * Add supports_thinking option to config examples Added support_thinking configuration option in examples. --------- Co-authored-by: Willem Jiang --- config.example.yaml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/config.example.yaml b/config.example.yaml index 0f7c3c9c7..137a5af3d 100644 --- a/config.example.yaml +++ b/config.example.yaml @@ -188,6 +188,7 @@ models: # max_tokens: 4096 # temperature: 1.0 # MiniMax requires temperature in (0.0, 1.0] # supports_vision: true + # supports_thinking: true # - name: minimax-m2.5-highspeed # display_name: MiniMax M2.5 Highspeed @@ -200,6 +201,7 @@ models: # max_tokens: 4096 # temperature: 1.0 # MiniMax requires temperature in (0.0, 1.0] # supports_vision: true + # supports_thinking: true # Example: MiniMax (OpenAI-compatible) - CN 中国区用户 # MiniMax provides high-performance models with 204K context window @@ -215,11 +217,12 @@ models: # max_tokens: 4096 # temperature: 1.0 # MiniMax requires temperature in (0.0, 1.0] # supports_vision: true + # supports_thinking: true - # - name: minimax-m2.5-highspeed - # display_name: MiniMax M2.5 Highspeed + # - name: minimax-m2.7-highspeed + # display_name: MiniMax M2.7 Highspeed # use: langchain_openai:ChatOpenAI - # model: MiniMax-M2.5-highspeed + # model: MiniMax-M2.7-highspeed # api_key: $MINIMAX_API_KEY # base_url: https://api.minimaxi.com/v1 # request_timeout: 600.0 @@ -227,6 +230,7 @@ models: # max_tokens: 4096 # temperature: 1.0 # MiniMax requires temperature in (0.0, 1.0] # supports_vision: true + # 
supports_thinking: true

# Example: OpenRouter (OpenAI-compatible)