perf: AI bot supports multiple sessions

kuaifan 2025-02-10 19:49:30 +09:00
parent f0b5e0c3b9
commit ffe7ebf711
5 changed files with 176 additions and 122 deletions

View File

@@ -66,10 +66,14 @@ class Setting extends AbstractModel
         }
         $array = [];
         $aiList = ['openai', 'claude', 'deepseek', 'gemini', 'zhipu', 'qianwen', 'wenxin'];
-        $fieldList = ['key', 'model', 'base_url', 'agency', 'system', 'secret'];
+        $fieldList = ['key', 'model', 'base_url', 'agency', 'temperature', 'system', 'secret'];
         foreach ($aiList as $aiName) {
             foreach ($fieldList as $fieldName) {
                 $key = $aiName . '_' . $fieldName;
+                if ($fieldName == 'temperature' && $value[$key]) {
+                    $array[$key] = floatval(min(1, max(0, floatval($value[$key]) ?: 0.7)));
+                    continue;
+                }
                 $array[$key] = $value[$key] ?: match ($key) {
                     'openai_model' => 'gpt-4o-mini',
                     'claude_model' => 'claude-3-5-sonnet-latest',
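Note: the normalization added above parses the submitted *_temperature value as a float, falls back to 0.7 when the parse yields nothing, and clamps the result into the 0-1 range. A minimal JavaScript mirror of that rule, for illustration only (the production code above is PHP):

    // Illustrative mirror of the PHP clamp above; not part of the commit.
    function normalizeTemperature(raw) {
        const t = parseFloat(raw) || 0.7;   // empty or non-numeric input falls back to 0.7
        return Math.min(1, Math.max(0, t)); // clamp into the 0-1 range
    }

    normalizeTemperature('1.8'); // -> 1
    normalizeTemperature('abc'); // -> 0.7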

View File

@@ -431,6 +431,9 @@ class BotReceiveMsgTask extends AbstractTask
             'agency' => $setting[$type . '_agency'],
             'server_url' => $serverUrl,
         ];
+        if ($setting[$type . '_temperature']) {
+            $extras['temperature'] = floatval($setting[$type . '_temperature']);
+        }
         if ($msg->msg['model_name']) {
             $extras['model_name'] = $msg->msg['model_name'];
         }
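Note: the configured temperature is forwarded to the AI call only when it is set. An illustrative sketch of the resulting extras payload shape (values and endpoints are examples, not from the commit):

    // Illustrative only: shape of the extras object built above.
    const extras = {
        agency: 'socks5://proxy.example.com',
        server_url: 'https://api.example.com',
        temperature: 0.7,          // present only when <type>_temperature is configured
        model_name: 'gpt-4o-mini'  // present only when the message pins a model
    };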

View File

@@ -720,7 +720,7 @@ import touchclick from "../../../directives/touchclick";
 import {languageList} from "../../../language";
 import {isLocalResourcePath} from "../../../components/Replace/utils";
 import emitter from "../../../store/events";
-import {AIModelList} from "../../../store/utils";
+import {AIModelLabel, AIModelList} from "../../../store/utils";

 export default {
     name: "DialogWrapper",
@@ -1847,8 +1847,11 @@ export default {
         quickLabel({key, label, config}) {
             if (key === '~ai-model-select') {
                 const model = this.aiModelValue()
-                if (model) return model
-                if (config?.model) return config.model
+                if (model) {
+                    label = AIModelLabel(this.dialogData.email, model)
+                } else if (config?.model) {
+                    label = AIModelLabel(this.dialogData.email, config.model)
+                }
             }
             return label
         },
@@ -1915,7 +1918,7 @@ export default {
             if (!this.isAiBot) {
                 return
             }
-            const list = AIModelList(this.dialogData.email).map(value => ({label: value, value: value}))
+            const list = AIModelList(this.dialogData.email)
             const configModel = item.config?.model
             if (configModel && !list.find(({value}) => value === configModel)) {
                 list.unshift({label: configModel, value: configModel})
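Note: the quick-menu label is now resolved through AIModelLabel, so the menu shows the display name (e.g. "GPT-4o") instead of the raw model id, falling back from the selected model to the config default. A small sketch of that fallback chain (dialog email and values are assumed for illustration):

    // Illustrative fallback chain mirroring quickLabel above.
    const email = 'ai-openai@bot.system';
    const selected = null;              // this.aiModelValue()
    const configModel = 'gpt-4o-mini';  // config?.model
    const label = selected
        ? AIModelLabel(email, selected)
        : (configModel ? AIModelLabel(email, configModel) : 'default label');
    // label -> 'GPT-4o Mini'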

View File

@@ -18,16 +18,15 @@
                     type="password"
                     :placeholder="$L(field.placeholder)"/>
             </template>
-            <template v-else-if="field.type === 'auto-complete'">
-                <AutoComplete
+            <template v-else-if="field.type === 'model'">
+                <Select
                     v-model="formData[field.prop]"
-                    :data="field.data"
-                    :placeholder="$L(field.placeholder)"
-                    :filter-method="field.noFilter ? null : filterMethod"
-                    @on-blur="field.noFilter = true"
-                    @on-keyup="field.noFilter = false"
+                    @on-create="modelCreate($event, field.options)"
+                    filterable
+                    allow-create
                     transfer>
-                </AutoComplete>
+                    <Option v-for="item in modelOption(formData[field.prop], field.options)" :value="item.value" :key="item.value">{{ item.label }}</Option>
+                </Select>
             </template>
             <template v-else-if="field.type === 'textarea'">
                 <Input
@@ -93,9 +92,8 @@ export default {
                {
                    label: '默认模型',
                    prop: 'openai_model',
-                   type: 'auto-complete',
-                   data: AIModelList('openai'),
-                   noFilter: true,
+                   type: 'model',
+                   options: AIModelList('openai'),
                    placeholder: '请输入模型名称',
                    tipPrefix: '查看说明',
                    link: 'https://platform.openai.com/docs/models'
@@ -103,7 +101,6 @@ export default {
                {
                    label: 'Base URL',
                    prop: 'openai_base_url',
-                   type: 'input',
                    placeholder: 'Enter base URL...',
                    tip: 'API请求的基础URL路径如果没有请留空'
                },
@@ -113,6 +110,12 @@ export default {
                    placeholder: '支持 http 或 socks 代理',
                    tip: '例如http://proxy.com 或 socks5://proxy.com'
                },
+               {
+                   label: 'Temperature',
+                   prop: 'openai_temperature',
+                   placeholder: '模型温度,低则保守,高则多样。',
+                   tip: '例如0.7范围0-1默认0.7'
+               },
                {
                    label: '默认提示词',
                    prop: 'openai_system',
@@ -134,9 +137,8 @@ export default {
                {
                    label: '默认模型',
                    prop: 'claude_model',
-                   type: 'auto-complete',
-                   data: AIModelList('claude'),
-                   noFilter: true,
+                   type: 'model',
+                   options: AIModelList('claude'),
                    placeholder: '请输入模型名称',
                    tipPrefix: '查看说明',
                    link: 'https://docs.anthropic.com/en/docs/about-claude/models'
@@ -147,6 +149,12 @@ export default {
                    placeholder: '支持 http 或 socks 代理',
                    tip: '例如http://proxy.com 或 socks5://proxy.com'
                },
+               {
+                   label: 'Temperature',
+                   prop: 'claude_temperature',
+                   placeholder: '模型温度,低则保守,高则多样。',
+                   tip: '例如0.7范围0-1默认0.7'
+               },
                {
                    label: '默认提示词',
                    prop: 'claude_system',
@@ -169,9 +177,8 @@ export default {
                {
                    label: '默认模型',
                    prop: 'deepseek_model',
-                   type: 'auto-complete',
-                   data: AIModelList('deepseek'),
-                   noFilter: true,
+                   type: 'model',
+                   options: AIModelList('deepseek'),
                    placeholder: '请输入模型名称',
                    tipPrefix: '查看说明',
                    link: 'https://api-docs.deepseek.com/zh-cn/quick_start/pricing'
@@ -189,6 +196,12 @@ export default {
                    placeholder: '支持 http 或 socks 代理',
                    tip: '例如http://proxy.com 或 socks5://proxy.com'
                },
+               {
+                   label: 'Temperature',
+                   prop: 'deepseek_temperature',
+                   placeholder: '模型温度,低则保守,高则多样。',
+                   tip: '例如0.7范围0-1默认0.7'
+               },
                {
                    label: '默认提示词',
                    prop: 'deepseek_system',
@@ -210,9 +223,8 @@ export default {
                {
                    label: '默认模型',
                    prop: 'gemini_model',
-                   type: 'auto-complete',
-                   data: AIModelList('gemini'),
-                   noFilter: true,
+                   type: 'model',
+                   options: AIModelList('gemini'),
                    placeholder: '请输入模型名称',
                    tipPrefix: '查看说明',
                    link: 'https://ai.google.dev/models/gemini'
@@ -223,6 +235,12 @@ export default {
                    placeholder: '仅支持 http 代理',
                    tip: '例如http://proxy.com 或 https://proxy.com'
                },
+               {
+                   label: 'Temperature',
+                   prop: 'gemini_temperature',
+                   placeholder: '模型温度,低则保守,高则多样。',
+                   tip: '例如0.7范围0-1默认0.7'
+               },
                {
                    label: '默认提示词',
                    prop: 'gemini_system',
@@ -244,9 +262,8 @@ export default {
                {
                    label: '默认模型',
                    prop: 'zhipu_model',
-                   type: 'auto-complete',
-                   data: AIModelList('zhipu'),
-                   noFilter: true,
+                   type: 'model',
+                   options: AIModelList('zhipu'),
                    placeholder: '请输入模型名称',
                    tipPrefix: '查看说明',
                    link: 'https://open.bigmodel.cn/dev/api'
@@ -257,6 +274,12 @@ export default {
                    placeholder: '支持 http 或 socks 代理',
                    tip: '例如http://proxy.com 或 socks5://proxy.com'
                },
+               {
+                   label: 'Temperature',
+                   prop: 'zhipu_temperature',
+                   placeholder: '模型温度,低则保守,高则多样。',
+                   tip: '例如0.7范围0-1默认0.7'
+               },
                {
                    label: '默认提示词',
                    prop: 'zhipu_system',
@@ -278,9 +301,8 @@ export default {
                {
                    label: '默认模型',
                    prop: 'qianwen_model',
-                   type: 'auto-complete',
-                   data: AIModelList('qianwen'),
-                   noFilter: true,
+                   type: 'model',
+                   options: AIModelList('qianwen'),
                    placeholder: '请输入模型名称',
                    tipPrefix: '查看说明',
                    link: 'https://help.aliyun.com/zh/model-studio/getting-started/models'
@@ -291,6 +313,12 @@ export default {
                    placeholder: '支持 http 或 socks 代理',
                    tip: '例如http://proxy.com 或 socks5://proxy.com'
                },
+               {
+                   label: 'Temperature',
+                   prop: 'qianwen_temperature',
+                   placeholder: '模型温度,低则保守,高则多样。',
+                   tip: '例如0.7范围0-1默认0.7'
+               },
                {
                    label: '默认提示词',
                    prop: 'qianwen_system',
@@ -319,9 +347,8 @@ export default {
                {
                    label: '默认模型',
                    prop: 'wenxin_model',
-                   type: 'auto-complete',
-                   data: AIModelList('wenxin'),
-                   noFilter: true,
+                   type: 'model',
+                   options: AIModelList('wenxin'),
                    placeholder: '请输入模型名称',
                    tipPrefix: '查看说明',
                    link: 'https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Blfmc9dlf'
@@ -332,6 +359,12 @@ export default {
                    placeholder: '支持 http 或 socks 代理',
                    tip: '例如http://proxy.com 或 socks5://proxy.com'
                },
+               {
+                   label: 'Temperature',
+                   prop: 'wenxin_temperature',
+                   placeholder: '模型温度,低则保守,高则多样。',
+                   tip: '例如0.7范围0-1默认0.7'
+               },
                {
                    label: '默认提示词',
                    prop: 'wenxin_system',
@@ -351,8 +384,14 @@ export default {
        ...mapState(['formOptions']),
    },
    methods: {
-       filterMethod(value, option) {
-           return option.toUpperCase().indexOf(value.toUpperCase()) !== -1;
+       modelCreate(value, options) {
+           options.push({value, label: value});
+       },
+       modelOption(value, options) {
+           if (value && !options.find(item => item.value === value)) {
+               options.unshift({value, label: value});
+           }
+           return options;
        },
        submitForm() {
            this.$refs.formData.validate((valid) => {
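Note: the two new helpers keep custom model names usable in the allow-create Select: modelCreate appends a value the user types as a new option, while modelOption prepends a previously saved value that is missing from the preset list so it still renders. A standalone sketch of the same behaviour, outside the component (illustrative values):

    // Illustrative standalone versions of the component methods above.
    const modelCreate = (value, options) => { options.push({value, label: value}); };
    const modelOption = (value, options) => {
        if (value && !options.find(item => item.value === value)) {
            options.unshift({value, label: value}); // saved custom model becomes a visible option
        }
        return options;
    };

    const options = [{value: 'gpt-4o-mini', label: 'GPT-4o Mini'}];
    modelOption('my-custom-model', options); // prepends {value: 'my-custom-model', label: 'my-custom-model'}
    modelCreate('another-model', options);   // appends a newly typed model as an option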

View File

@@ -163,90 +163,95 @@ export class SSEClient {
     }
 }

-export function AIModelList(email) {
-    switch (email) {
-        case "openai":
-        case "ai-openai@bot.system":
-            return [
-                'gpt-4',
-                'gpt-4-turbo',
-                'gpt-4o',
-                'gpt-4o-mini',
-                'gpt-3.5-turbo',
-                'gpt-3.5-turbo-16k',
-                'gpt-3.5-turbo-0125',
-                'gpt-3.5-turbo-1106'
-            ]
-
-        case "claude":
-        case "ai-claude@bot.system":
-            return [
-                'claude-3-5-sonnet-latest',
-                'claude-3-5-sonnet-20241022',
-                'claude-3-5-haiku-latest',
-                'claude-3-5-haiku-20241022',
-                'claude-3-opus-latest',
-                'claude-3-opus-20240229',
-                'claude-3-haiku-20240307',
-                'claude-2.1',
-                'claude-2.0'
-            ]
-
-        case "deepseek":
-        case "ai-deepseek@bot.system":
-            return [
-                'deepseek-chat',
-                'deepseek-reasoner'
-            ]
-
-        case "wenxin":
-        case "ai-wenxin@bot.system":
-            return [
-                'gemini-1.5-flash',
-                'gemini-1.5-flash-8b',
-                'gemini-1.5-pro',
-                'gemini-1.0-pro',
-            ]
-
-        case "qianwen":
-        case "ai-qianwen@bot.system":
-            return [
-                'glm-4',
-                'glm-4-plus',
-                'glm-4-air',
-                'glm-4-airx',
-                'glm-4-long',
-                'glm-4-flash',
-                'glm-4v',
-                'glm-4v-plus',
-                'glm-3-turbo'
-            ]
-
-        case "gemini":
-        case "ai-gemini@bot.system":
-            return [
-                'qwen-turbo',
-                'qwen-turbo-latest',
-                'qwen-plus',
-                'qwen-plus-latest',
-                'qwen-max',
-                'qwen-max-latest',
-                'qwen-long',
-            ]
-
-        case "zhipu":
-        case "ai-zhipu@bot.system":
-            return [
-                'ernie-4.0-8k',
-                'ernie-4.0-8k-latest',
-                'ernie-4.0-turbo-128k',
-                'ernie-4.0-turbo-8k',
-                'ernie-3.5-128k',
-                'ernie-3.5-8k',
-                'ernie-speed-128k',
-                'ernie-speed-8k',
-                'ernie-lite-8k',
-                'ernie-tiny-8k',
-            ]
-    }
-}
+const __AIModelData = {
+    openai: [
+        {value: 'gpt-4', label: 'GPT-4'},
+        {value: 'gpt-4-turbo', label: 'GPT-4 Turbo'},
+        {value: 'gpt-4o', label: 'GPT-4o'},
+        {value: 'gpt-4o-mini', label: 'GPT-4o Mini'},
+        {value: 'o1', label: 'GPT-o1'},
+        {value: 'o1-mini', label: 'GPT-o1 Mini'},
+        {value: 'o3-mini', label: 'GPT-o3 Mini'},
+        {value: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo'},
+        {value: 'gpt-3.5-turbo-16k', label: 'GPT-3.5 Turbo 16K'},
+        {value: 'gpt-3.5-turbo-0125', label: 'GPT-3.5 Turbo 0125'},
+        {value: 'gpt-3.5-turbo-1106', label: 'GPT-3.5 Turbo 1106'}
+    ],
+    claude: [
+        {value: 'claude-3-5-sonnet-latest', label: 'Claude 3.5 Sonnet'},
+        {value: 'claude-3-5-sonnet-20241022', label: 'Claude 3.5 Sonnet 20241022'},
+        {value: 'claude-3-5-haiku-latest', label: 'Claude 3.5 Haiku'},
+        {value: 'claude-3-5-haiku-20241022', label: 'Claude 3.5 Haiku 20241022'},
+        {value: 'claude-3-opus-latest', label: 'Claude 3 Opus'},
+        {value: 'claude-3-opus-20240229', label: 'Claude 3 Opus 20240229'},
+        {value: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku 20240307'},
+        {value: 'claude-2.1', label: 'Claude 2.1'},
+        {value: 'claude-2.0', label: 'Claude 2.0'}
+    ],
+    deepseek: [
+        {value: 'deepseek-chat', label: 'DeepSeek V3'},
+        {value: 'deepseek-reasoner', label: 'DeepSeek R1'}
+    ],
+    wenxin: [
+        {value: 'ernie-4.0-8k', label: 'Ernie 4.0 8K'},
+        {value: 'ernie-4.0-8k-latest', label: 'Ernie 4.0 8K Latest'},
+        {value: 'ernie-4.0-turbo-128k', label: 'Ernie 4.0 Turbo 128K'},
+        {value: 'ernie-4.0-turbo-8k', label: 'Ernie 4.0 Turbo 8K'},
+        {value: 'ernie-3.5-128k', label: 'Ernie 3.5 128K'},
+        {value: 'ernie-3.5-8k', label: 'Ernie 3.5 8K'},
+        {value: 'ernie-speed-128k', label: 'Ernie Speed 128K'},
+        {value: 'ernie-speed-8k', label: 'Ernie Speed 8K'},
+        {value: 'ernie-lite-8k', label: 'Ernie Lite 8K'},
+        {value: 'ernie-tiny-8k', label: 'Ernie Tiny 8K'}
+    ],
+    qianwen: [
+        {value: 'qwen-max', label: 'QWEN Max'},
+        {value: 'qwen-max-latest', label: 'QWEN Max Latest'},
+        {value: 'qwen-turbo', label: 'QWEN Turbo'},
+        {value: 'qwen-turbo-latest', label: 'QWEN Turbo Latest'},
+        {value: 'qwen-plus', label: 'QWEN Plus'},
+        {value: 'qwen-plus-latest', label: 'QWEN Plus Latest'},
+        {value: 'qwen-long', label: 'QWEN Long'}
+    ],
+    gemini: [
+        {value: 'gemini-2.0-flash', label: 'Gemini 2.0 Flash'},
+        {value: 'gemini-2.0-flash-lite-preview-02-05', label: 'Gemini 2.0 Flash-Lite Preview'},
+        {value: 'gemini-1.5-flash', label: 'Gemini 1.5 Flash'},
+        {value: 'gemini-1.5-flash-8b', label: 'Gemini 1.5 Flash 8B'},
+        {value: 'gemini-1.5-pro', label: 'Gemini 1.5 Pro'},
+        {value: 'gemini-1.0-pro', label: 'Gemini 1.0 Pro'}
+    ],
+    zhipu: [
+        {value: 'glm-4', label: 'GLM-4'},
+        {value: 'glm-4-plus', label: 'GLM-4 Plus'},
+        {value: 'glm-4-air', label: 'GLM-4 Air'},
+        {value: 'glm-4-airx', label: 'GLM-4 AirX'},
+        {value: 'glm-4-long', label: 'GLM-4 Long'},
+        {value: 'glm-4-flash', label: 'GLM-4 Flash'},
+        {value: 'glm-4v', label: 'GLM-4V'},
+        {value: 'glm-4v-plus', label: 'GLM-4V Plus'},
+        {value: 'glm-3-turbo', label: 'GLM-3 Turbo'}
+    ]
+}
+
+const AIModelList = (email) => {
+    const emailMap = {
+        "ai-openai@bot.system": "openai",
+        "ai-claude@bot.system": "claude",
+        "ai-deepseek@bot.system": "deepseek",
+        "ai-wenxin@bot.system": "wenxin",
+        "ai-qianwen@bot.system": "qianwen",
+        "ai-gemini@bot.system": "gemini",
+        "ai-zhipu@bot.system": "zhipu"
+    };
+    email = emailMap[email] || email;
+    return __AIModelData[email] || []
+}
+
+const AIModelLabel = (email, model) => {
+    const item = AIModelList(email).find(item => item.value === model)
+    return item ? item.label : model
+}
+
+export {AIModelList, AIModelLabel}
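Note: AIModelList now returns {value, label} pairs and accepts either a provider key or the corresponding bot email; unknown providers yield an empty list, and AIModelLabel falls back to the raw model id when no label is known. A brief usage sketch (values follow the __AIModelData table above):

    // Illustrative usage of the new exports.
    AIModelList('deepseek');                    // -> [{value: 'deepseek-chat', label: 'DeepSeek V3'}, ...]
    AIModelList('ai-deepseek@bot.system');      // same list; the bot email is mapped to its provider key
    AIModelList('some-other-bot');              // -> []
    AIModelLabel('claude', 'claude-2.1');       // -> 'Claude 2.1'
    AIModelLabel('claude', 'my-custom-model');  // -> 'my-custom-model'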