From 8636fadc72535dc19d09b11312d797ce4d306db1 Mon Sep 17 00:00:00 2001 From: mcz Date: Tue, 3 Jun 2025 23:07:10 +0800 Subject: [PATCH] =?UTF-8?q?=E6=8E=A5=E5=8F=A3=E8=AE=BE=E7=BD=AEollama?= =?UTF-8?q?=E6=96=B0=E5=A2=9E=E6=98=AF=E5=90=A6=E7=A6=81=E7=94=A8=E6=B7=B1?= =?UTF-8?q?=E5=BA=A6=E6=80=9D=E8=80=83=E5=8F=82=E6=95=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/apis/trans.js | 3 ++- src/config/i18n.js | 14 +++++++++++++- src/config/index.js | 1 + src/views/Options/Apis.js | 13 +++++++++++++ 4 files changed, 29 insertions(+), 2 deletions(-) diff --git a/src/apis/trans.js b/src/apis/trans.js index b833d07..3109252 100644 --- a/src/apis/trans.js +++ b/src/apis/trans.js @@ -314,7 +314,7 @@ const genClaude = ({ return [url, init]; }; -const genOllama = ({ text, from, to, url, key, systemPrompt, userPrompt, model }) => { +const genOllama = ({ text, from, to, think, url, key, systemPrompt, userPrompt, model }) => { systemPrompt = systemPrompt .replaceAll(INPUT_PLACE_FROM, from) .replaceAll(INPUT_PLACE_TO, to) @@ -328,6 +328,7 @@ const genOllama = ({ text, from, to, url, key, systemPrompt, userPrompt, model } model, system: systemPrompt, prompt: userPrompt, + think: think, stream: false, }; diff --git a/src/config/i18n.js b/src/config/i18n.js index f69ae44..f56ebee 100644 --- a/src/config/i18n.js +++ b/src/config/i18n.js @@ -186,8 +186,20 @@ export const I18N = { zh: `最大并发请求数量 (1-100)`, en: `Maximum Number Of Concurrent Requests (1-100)`, }, + if_think: { + zh: `启用或禁用模型的深度思考能力`, + en: `Enable or disable the model’s thinking behavior`, + }, + think: { + zh: `启用深度思考`, + en: `Enable Thinking`, + }, + nothink: { + zh: `禁用深度思考`, + en: `Disable Thinking`, + }, think_ignore: { - zh: `忽略以下模型的输出,逗号(,)分割`, + zh: `忽略以下模型的输出,逗号(,)分割,当模型支持思考但ollama不支持时需要填写本参数`, en: `Ignore the block for the following models, comma (,) separated`, }, fetch_interval: { diff --git a/src/config/index.js b/src/config/index.js index 5aa6db9..19281d4
100644 --- a/src/config/index.js +++ b/src/config/index.js @@ -543,6 +543,7 @@ const defaultOllamaApi = { model: "llama3.1", systemPrompt: `You are a professional, authentic machine translation engine.`, userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`, + think: false, thinkIgnore:`qwen3,deepseek-r1`, fetchLimit: 1, fetchInterval: 500, diff --git a/src/views/Options/Apis.js b/src/views/Options/Apis.js index af66062..ee06beb 100644 --- a/src/views/Options/Apis.js +++ b/src/views/Options/Apis.js @@ -2,6 +2,7 @@ import Stack from "@mui/material/Stack"; import TextField from "@mui/material/TextField"; import Button from "@mui/material/Button"; import LoadingButton from "@mui/lab/LoadingButton"; +import MenuItem from "@mui/material/MenuItem"; import { OPT_TRANS_ALL, OPT_TRANS_MICROSOFT, @@ -117,6 +118,7 @@ function ApiFields({ translator }) { model = "", systemPrompt = "", userPrompt = "", + think = "", thinkIgnore = "", fetchLimit = DEFAULT_FETCH_LIMIT, fetchInterval = DEFAULT_FETCH_INTERVAL, @@ -249,6 +251,17 @@ function ApiFields({ translator }) { {(translator.startsWith(OPT_TRANS_OLLAMA)) && ( <> + <TextField select size="small" name="think" value={think} label={i18n("if_think")} onChange={handleChange}> + <MenuItem value={false}>{i18n("nothink")}</MenuItem> + <MenuItem value={true}>{i18n("think")}</MenuItem> + </TextField>