perf: AI bot supports multiple sessions

kuaifan 2025-02-10 19:49:30 +09:00
parent f0b5e0c3b9
commit ffe7ebf711
5 changed files with 176 additions and 122 deletions

View File

@@ -66,10 +66,14 @@ class Setting extends AbstractModel
}
$array = [];
$aiList = ['openai', 'claude', 'deepseek', 'gemini', 'zhipu', 'qianwen', 'wenxin'];
$fieldList = ['key', 'model', 'base_url', 'agency', 'system', 'secret'];
$fieldList = ['key', 'model', 'base_url', 'agency', 'temperature', 'system', 'secret'];
foreach ($aiList as $aiName) {
foreach ($fieldList as $fieldName) {
$key = $aiName . '_' . $fieldName;
if ($fieldName == 'temperature' && $value[$key]) {
$array[$key] = floatval(min(1, max(0, floatval($value[$key]) ?: 0.7)));
continue;
}
$array[$key] = $value[$key] ?: match ($key) {
'openai_model' => 'gpt-4o-mini',
'claude_model' => 'claude-3-5-sonnet-latest',

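The new temperature field is normalized before it is stored: an empty value falls back to the 0.7 default and anything else is clamped into the 0-1 range. A minimal JavaScript sketch of the same rule (normalizeTemperature is an illustrative helper, not part of this commit):

// Mirrors the PHP normalization above: empty or non-numeric input falls back to 0.7,
// everything else is clamped into [0, 1]. Illustrative helper only.
function normalizeTemperature(raw) {
    const value = parseFloat(raw) || 0.7;
    return Math.min(1, Math.max(0, value));
}

normalizeTemperature("");    // 0.7
normalizeTemperature("1.5"); // 1
normalizeTemperature("0.3"); // 0.3
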
View File

@@ -431,6 +431,9 @@ class BotReceiveMsgTask extends AbstractTask
'agency' => $setting[$type . '_agency'],
'server_url' => $serverUrl,
];
if ($setting[$type . '_temperature']) {
$extras['temperature'] = floatval($setting[$type . '_temperature']);
}
if ($msg->msg['model_name']) {
$extras['model_name'] = $msg->msg['model_name'];
}
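
The task now forwards the configured temperature (and any per-dialog model override) to the AI service as part of the request extras. A hedged JavaScript sketch of that payload assembly (buildExtras is illustrative; the field names come from the hunk above):

// Illustrative only: shape of the per-request extras the task builds.
function buildExtras(setting, type, msg, serverUrl) {
    const extras = {
        agency: setting[`${type}_agency`],
        server_url: serverUrl,
    };
    if (setting[`${type}_temperature`]) {
        extras.temperature = parseFloat(setting[`${type}_temperature`]);
    }
    if (msg.model_name) {
        extras.model_name = msg.model_name;
    }
    return extras;
}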

View File

@@ -720,7 +720,7 @@ import touchclick from "../../../directives/touchclick";
import {languageList} from "../../../language";
import {isLocalResourcePath} from "../../../components/Replace/utils";
import emitter from "../../../store/events";
import {AIModelList} from "../../../store/utils";
import {AIModelLabel, AIModelList} from "../../../store/utils";
export default {
name: "DialogWrapper",
@@ -1847,8 +1847,11 @@ export default {
quickLabel({key, label, config}) {
if (key === '~ai-model-select') {
const model = this.aiModelValue()
if (model) return model
if (config?.model) return config.model
if (model) {
label = AIModelLabel(this.dialogData.email, model)
} else if (config?.model) {
label = AIModelLabel(this.dialogData.email, config.model)
}
}
return label
},
@@ -1915,7 +1918,7 @@ export default {
if (!this.isAiBot) {
return
}
const list = AIModelList(this.dialogData.email).map(value => ({label: value, value: value}))
const list = AIModelList(this.dialogData.email)
const configModel = item.config?.model
if (configModel && !list.find(({value}) => value === configModel)) {
list.unshift({label: configModel, value: configModel})

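quickLabel now resolves the quick-action caption in two steps: the model currently selected in the dialog wins, otherwise the model stored in the action config is used, and either value is mapped to its display name through AIModelLabel. A small standalone sketch of that resolution order (example data; resolveModelLabel is not part of the component):

import {AIModelLabel} from "../../../store/utils";

// Selected model first, then the config default; without either, keep the original label.
function resolveModelLabel(dialogEmail, selectedModel, configModel, label) {
    const model = selectedModel || configModel;
    return model ? AIModelLabel(dialogEmail, model) : label;
}

resolveModelLabel("ai-openai@bot.system", "gpt-4o", null, "AI Model"); // "GPT-4o"
resolveModelLabel("ai-openai@bot.system", null, null, "AI Model");     // "AI Model"
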
View File

@@ -18,16 +18,15 @@
type="password"
:placeholder="$L(field.placeholder)"/>
</template>
<template v-else-if="field.type === 'auto-complete'">
<AutoComplete
<template v-else-if="field.type === 'model'">
<Select
v-model="formData[field.prop]"
:data="field.data"
:placeholder="$L(field.placeholder)"
:filter-method="field.noFilter ? null : filterMethod"
@on-blur="field.noFilter = true"
@on-keyup="field.noFilter = false"
@on-create="modelCreate($event, field.options)"
filterable
allow-create
transfer>
</AutoComplete>
<Option v-for="item in modelOption(formData[field.prop], field.options)" :value="item.value" :key="item.value">{{ item.label }}</Option>
</Select>
</template>
<template v-else-if="field.type === 'textarea'">
<Input
@@ -93,9 +92,8 @@ export default {
{
label: '默认模型',
prop: 'openai_model',
type: 'auto-complete',
data: AIModelList('openai'),
noFilter: true,
type: 'model',
options: AIModelList('openai'),
placeholder: '请输入模型名称',
tipPrefix: '查看说明',
link: 'https://platform.openai.com/docs/models'
@@ -103,7 +101,6 @@ export default {
{
label: 'Base URL',
prop: 'openai_base_url',
type: 'input',
placeholder: 'Enter base URL...',
tip: 'API请求的基础URL路径如果没有请留空'
},
@@ -113,6 +110,12 @@ export default {
placeholder: '支持 http 或 socks 代理',
tip: '例如http://proxy.com 或 socks5://proxy.com'
},
{
label: 'Temperature',
prop: 'openai_temperature',
placeholder: '模型温度,低则保守,高则多样。',
tip: '例如0.7范围0-1默认0.7'
},
{
label: '默认提示词',
prop: 'openai_system',
@@ -134,9 +137,8 @@ export default {
{
label: '默认模型',
prop: 'claude_model',
type: 'auto-complete',
data: AIModelList('claude'),
noFilter: true,
type: 'model',
options: AIModelList('claude'),
placeholder: '请输入模型名称',
tipPrefix: '查看说明',
link: 'https://docs.anthropic.com/en/docs/about-claude/models'
@@ -147,6 +149,12 @@ export default {
placeholder: '支持 http 或 socks 代理',
tip: '例如http://proxy.com 或 socks5://proxy.com'
},
{
label: 'Temperature',
prop: 'claude_temperature',
placeholder: '模型温度,低则保守,高则多样。',
tip: '例如0.7范围0-1默认0.7'
},
{
label: '默认提示词',
prop: 'claude_system',
@@ -169,9 +177,8 @@ export default {
{
label: '默认模型',
prop: 'deepseek_model',
type: 'auto-complete',
data: AIModelList('deepseek'),
noFilter: true,
type: 'model',
options: AIModelList('deepseek'),
placeholder: '请输入模型名称',
tipPrefix: '查看说明',
link: 'https://api-docs.deepseek.com/zh-cn/quick_start/pricing'
@@ -189,6 +196,12 @@ export default {
placeholder: '支持 http 或 socks 代理',
tip: '例如http://proxy.com 或 socks5://proxy.com'
},
{
label: 'Temperature',
prop: 'deepseek_temperature',
placeholder: '模型温度,低则保守,高则多样。',
tip: '例如0.7范围0-1默认0.7'
},
{
label: '默认提示词',
prop: 'deepseek_system',
@@ -210,9 +223,8 @@ export default {
{
label: '默认模型',
prop: 'gemini_model',
type: 'auto-complete',
data: AIModelList('gemini'),
noFilter: true,
type: 'model',
options: AIModelList('gemini'),
placeholder: '请输入模型名称',
tipPrefix: '查看说明',
link: 'https://ai.google.dev/models/gemini'
@@ -223,6 +235,12 @@ export default {
placeholder: '仅支持 http 代理',
tip: '例如http://proxy.com 或 https://proxy.com'
},
{
label: 'Temperature',
prop: 'gemini_temperature',
placeholder: '模型温度,低则保守,高则多样。',
tip: '例如0.7范围0-1默认0.7'
},
{
label: '默认提示词',
prop: 'gemini_system',
@@ -244,9 +262,8 @@ export default {
{
label: '默认模型',
prop: 'zhipu_model',
type: 'auto-complete',
data: AIModelList('zhipu'),
noFilter: true,
type: 'model',
options: AIModelList('zhipu'),
placeholder: '请输入模型名称',
tipPrefix: '查看说明',
link: 'https://open.bigmodel.cn/dev/api'
@@ -257,6 +274,12 @@ export default {
placeholder: '支持 http 或 socks 代理',
tip: '例如http://proxy.com 或 socks5://proxy.com'
},
{
label: 'Temperature',
prop: 'zhipu_temperature',
placeholder: '模型温度,低则保守,高则多样。',
tip: '例如0.7范围0-1默认0.7'
},
{
label: '默认提示词',
prop: 'zhipu_system',
@@ -278,9 +301,8 @@ export default {
{
label: '默认模型',
prop: 'qianwen_model',
type: 'auto-complete',
data: AIModelList('qianwen'),
noFilter: true,
type: 'model',
options: AIModelList('qianwen'),
placeholder: '请输入模型名称',
tipPrefix: '查看说明',
link: 'https://help.aliyun.com/zh/model-studio/getting-started/models'
@@ -291,6 +313,12 @@ export default {
placeholder: '支持 http 或 socks 代理',
tip: '例如http://proxy.com 或 socks5://proxy.com'
},
{
label: 'Temperature',
prop: 'qianwen_temperature',
placeholder: '模型温度,低则保守,高则多样。',
tip: '例如0.7范围0-1默认0.7'
},
{
label: '默认提示词',
prop: 'qianwen_system',
@@ -319,9 +347,8 @@ export default {
{
label: '默认模型',
prop: 'wenxin_model',
type: 'auto-complete',
data: AIModelList('wenxin'),
noFilter: true,
type: 'model',
options: AIModelList('wenxin'),
placeholder: '请输入模型名称',
tipPrefix: '查看说明',
link: 'https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Blfmc9dlf'
@@ -332,6 +359,12 @@ export default {
placeholder: '支持 http 或 socks 代理',
tip: '例如http://proxy.com 或 socks5://proxy.com'
},
{
label: 'Temperature',
prop: 'wenxin_temperature',
placeholder: '模型温度,低则保守,高则多样。',
tip: '例如0.7范围0-1默认0.7'
},
{
label: '默认提示词',
prop: 'wenxin_system',
@@ -351,8 +384,14 @@ export default {
...mapState(['formOptions']),
},
methods: {
filterMethod(value, option) {
return option.toUpperCase().indexOf(value.toUpperCase()) !== -1;
modelCreate(value, options) {
options.push({value, label: value});
},
modelOption(value, options) {
if (value && !options.find(item => item.value === value)) {
options.unshift({value, label: value});
}
return options;
},
submitForm() {
this.$refs.formData.validate((valid) => {

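The AutoComplete fields are replaced by a filterable Select with allow-create: the preset options come from AIModelList, @on-create pushes a newly typed model into the options via modelCreate, and modelOption prepends an already-saved custom value so it still renders as a selectable entry. The two helpers in isolation, with example usage:

// Standalone copies of the two helpers wired to the <Select> above.
function modelCreate(value, options) {
    // Called by @on-create when the user types a model that is not in the preset list.
    options.push({value, label: value});
}

function modelOption(value, options) {
    // Keep a previously saved custom model visible in the dropdown.
    if (value && !options.find(item => item.value === value)) {
        options.unshift({value, label: value});
    }
    return options;
}

const options = [{value: 'gpt-4o-mini', label: 'GPT-4o Mini'}];
modelOption('my-fine-tuned-model', options);
// -> the saved custom value is prepended and selectable alongside the presets
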
View File

@@ -163,90 +163,95 @@ export class SSEClient {
}
}
export function AIModelList(email) {
switch (email) {
case "openai":
case "ai-openai@bot.system":
return [
'gpt-4',
'gpt-4-turbo',
'gpt-4o',
'gpt-4o-mini',
'gpt-3.5-turbo',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-0125',
'gpt-3.5-turbo-1106'
]
case "claude":
case "ai-claude@bot.system":
return [
'claude-3-5-sonnet-latest',
'claude-3-5-sonnet-20241022',
'claude-3-5-haiku-latest',
'claude-3-5-haiku-20241022',
'claude-3-opus-latest',
'claude-3-opus-20240229',
'claude-3-haiku-20240307',
'claude-2.1',
'claude-2.0'
]
case "deepseek":
case "ai-deepseek@bot.system":
return [
'deepseek-chat',
'deepseek-reasoner'
]
case "wenxin":
case "ai-wenxin@bot.system":
return [
'gemini-1.5-flash',
'gemini-1.5-flash-8b',
'gemini-1.5-pro',
'gemini-1.0-pro',
]
case "qianwen":
case "ai-qianwen@bot.system":
return [
'glm-4',
'glm-4-plus',
'glm-4-air',
'glm-4-airx',
'glm-4-long',
'glm-4-flash',
'glm-4v',
'glm-4v-plus',
'glm-3-turbo'
]
case "gemini":
case "ai-gemini@bot.system":
return [
'qwen-turbo',
'qwen-turbo-latest',
'qwen-plus',
'qwen-plus-latest',
'qwen-max',
'qwen-max-latest',
'qwen-long',
]
case "zhipu":
case "ai-zhipu@bot.system":
return [
'ernie-4.0-8k',
'ernie-4.0-8k-latest',
'ernie-4.0-turbo-128k',
'ernie-4.0-turbo-8k',
'ernie-3.5-128k',
'ernie-3.5-8k',
'ernie-speed-128k',
'ernie-speed-8k',
'ernie-lite-8k',
'ernie-tiny-8k',
]
}
const __AIModelData = {
openai: [
{value: 'gpt-4', label: 'GPT-4'},
{value: 'gpt-4-turbo', label: 'GPT-4 Turbo'},
{value: 'gpt-4o', label: 'GPT-4o'},
{value: 'gpt-4o-mini', label: 'GPT-4o Mini'},
{value: 'o1', label: 'GPT-o1'},
{value: 'o1-mini', label: 'GPT-o1 Mini'},
{value: 'o3-mini', label: 'GPT-o3 Mini'},
{value: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo'},
{value: 'gpt-3.5-turbo-16k', label: 'GPT-3.5 Turbo 16K'},
{value: 'gpt-3.5-turbo-0125', label: 'GPT-3.5 Turbo 0125'},
{value: 'gpt-3.5-turbo-1106', label: 'GPT-3.5 Turbo 1106'}
],
claude: [
{value: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet'},
{value: 'claude-3-5-sonnet-20241022', label: 'Claude 3.5 Sonnet 20241022'},
{value: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku'},
{value: 'claude-3-5-haiku-20241022', label: 'Claude 3.5 Haiku 20241022'},
{value: 'claude-3-opus-latest', label: 'Claude 3 Opus'},
{value: 'claude-3-opus-20240229', label: 'Claude 3 Opus 20240229'},
{value: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku 20240307'},
{value: 'claude-2.1', label: 'Claude 2.1'},
{value: 'claude-2.0', label: 'Claude 2.0'}
],
deepseek: [
{value: 'deepseek-chat', label: 'DeepSeek V3'},
{value: 'deepseek-reasoner', label: 'DeepSeek R1'}
],
wenxin: [
{value: 'ernie-4.0-8k', label: 'Ernie 4.0 8K'},
{value: 'ernie-4.0-8k-latest', label: 'Ernie 4.0 8K Latest'},
{value: 'ernie-4.0-turbo-128k', label: 'Ernie 4.0 Turbo 128K'},
{value: 'ernie-4.0-turbo-8k', label: 'Ernie 4.0 Turbo 8K'},
{value: 'ernie-3.5-128k', label: 'Ernie 3.5 128K'},
{value: 'ernie-3.5-8k', label: 'Ernie 3.5 8K'},
{value: 'ernie-speed-128k', label: 'Ernie Speed 128K'},
{value: 'ernie-speed-8k', label: 'Ernie Speed 8K'},
{value: 'ernie-lite-8k', label: 'Ernie Lite 8K'},
{value: 'ernie-tiny-8k', label: 'Ernie Tiny 8K'}
],
qianwen: [
{value: 'qwen-max', label: 'QWEN Max'},
{value: 'qwen-max-latest', label: 'QWEN Max Latest'},
{value: 'qwen-turbo', label: 'QWEN Turbo'},
{value: 'qwen-turbo-latest', label: 'QWEN Turbo Latest'},
{value: 'qwen-plus', label: 'QWEN Plus'},
{value: 'qwen-plus-latest', label: 'QWEN Plus Latest'},
{value: 'qwen-long', label: 'QWEN Long'}
],
gemini: [
{value: 'gemini-2.0-flash', label: 'Gemini 2.0 Flash'},
{value: 'gemini-2.0-flash-lite-preview-02-05', label: 'Gemini 2.0 Flash-Lite Preview'},
{value: 'gemini-1.5-flash', label: 'Gemini 1.5 Flash'},
{value: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash 8B'},
{value: 'gemini-1.5-pro', label: 'Gemini 1.5 Pro'},
{value: 'gemini-1.0-pro', label: 'Gemini 1.0 Pro'}
],
zhipu: [
{value: 'glm-4', label: 'GLM-4'},
{value: 'glm-4-plus', label: 'GLM-4 Plus'},
{value: 'glm-4-air', label: 'GLM-4 Air'},
{value: 'glm-4-airx', label: 'GLM-4 AirX'},
{value: 'glm-4-long', label: 'GLM-4 Long'},
{value: 'glm-4-flash', label: 'GLM-4 Flash'},
{value: 'glm-4v', label: 'GLM-4V'},
{value: 'glm-4v-plus', label: 'GLM-4V Plus'},
{value: 'glm-3-turbo', label: 'GLM-3 Turbo'}
]
}
const AIModelList = (email) => {
const emailMap = {
"ai-openai@bot.system": "openai",
"ai-claude@bot.system": "claude",
"ai-deepseek@bot.system": "deepseek",
"ai-wenxin@bot.system": "wenxin",
"ai-qianwen@bot.system": "qianwen",
"ai-gemini@bot.system": "gemini",
"ai-zhipu@bot.system": "zhipu"
};
email = emailMap[email] || email;
return __AIModelData[email] || []
}
const AIModelLabel = (email, model) => {
const item = AIModelList(email).find(item => item.value === model)
return item ? item.label : model
}
export {AIModelList, AIModelLabel}
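
AIModelList now returns {value, label} pairs and accepts either the provider key or the bot's system email (resolved through emailMap); AIModelLabel falls back to the raw model value when it is not in the preset table. Example usage:

// Using the AIModelList / AIModelLabel exported above.
AIModelList("deepseek");                                      // [{value: 'deepseek-chat', label: 'DeepSeek V3'}, ...]
AIModelList("ai-deepseek@bot.system");                        // same list, resolved via emailMap
AIModelLabel("ai-deepseek@bot.system", "deepseek-reasoner");  // "DeepSeek R1"
AIModelLabel("openai", "my-custom-model");                    // "my-custom-model" (unknown models fall back to the value)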