diff --git a/src/apis/trans.js b/src/apis/trans.js index b833d07..3109252 100644 --- a/src/apis/trans.js +++ b/src/apis/trans.js @@ -314,7 +314,7 @@ const genClaude = ({ return [url, init]; }; -const genOllama = ({ text, from, to, url, key, systemPrompt, userPrompt, model }) => { +const genOllama = ({ text, from, to, think, url, key, systemPrompt, userPrompt, model }) => { systemPrompt = systemPrompt .replaceAll(INPUT_PLACE_FROM, from) .replaceAll(INPUT_PLACE_TO, to) @@ -328,6 +328,7 @@ const genOllama = ({ text, from, to, url, key, systemPrompt, userPrompt, model } model, system: systemPrompt, prompt: userPrompt, + think, stream: false, }; diff --git a/src/config/i18n.js b/src/config/i18n.js index f69ae44..f56ebee 100644 --- a/src/config/i18n.js +++ b/src/config/i18n.js @@ -186,8 +186,20 @@ export const I18N = { zh: `最大并发请求数量 (1-100)`, en: `Maximum Number Of Concurrent Requests (1-100)`, }, + if_think: { + zh: `启用或禁用模型的深度思考能力`, + en: `Enable or disable the model's thinking behavior`, + }, + think: { + zh: `启用深度思考`, + en: `Enable Thinking`, + }, + nothink: { + zh: `禁用深度思考`, + en: `Disable Thinking`, + }, think_ignore: { - zh: `忽略以下模型的输出,逗号(,)分割`, + zh: `忽略以下模型的输出,逗号(,)分割,当模型支持思考但ollama不支持时需要填写本参数`, en: `Ignore the block for the following models, comma (,) separated`, }, fetch_interval: { diff --git a/src/config/index.js b/src/config/index.js index 5aa6db9..19281d4 100644 --- a/src/config/index.js +++ b/src/config/index.js @@ -543,6 +543,7 @@ const defaultOllamaApi = { model: "llama3.1", systemPrompt: `You are a professional, authentic machine translation engine.`, userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. 
Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`, + think: false, thinkIgnore:`qwen3,deepseek-r1`, fetchLimit: 1, fetchInterval: 500, diff --git a/src/views/Options/Apis.js b/src/views/Options/Apis.js index af66062..ee06beb 100644 --- a/src/views/Options/Apis.js +++ b/src/views/Options/Apis.js @@ -2,6 +2,7 @@ import Stack from "@mui/material/Stack"; import TextField from "@mui/material/TextField"; import Button from "@mui/material/Button"; import LoadingButton from "@mui/lab/LoadingButton"; +import MenuItem from "@mui/material/MenuItem"; import { OPT_TRANS_ALL, OPT_TRANS_MICROSOFT, @@ -117,6 +118,7 @@ function ApiFields({ translator }) { model = "", systemPrompt = "", userPrompt = "", + think = "", thinkIgnore = "", fetchLimit = DEFAULT_FETCH_LIMIT, fetchInterval = DEFAULT_FETCH_INTERVAL, @@ -249,6 +251,17 @@ {(translator.startsWith(OPT_TRANS_OLLAMA)) && ( <> + + {i18n("nothink")} + {i18n("think")} +