Merge pull request #240 from unclemcz/dev

feat: add an option to the Ollama API settings to enable or disable deep thinking
Gabe, committed by GitHub
2025-06-25 20:42:31 +08:00
4 changed files with 29 additions and 2 deletions

View File

@@ -314,7 +314,7 @@ const genClaude = ({
   return [url, init];
 };
-const genOllama = ({ text, from, to, url, key, systemPrompt, userPrompt, model }) => {
+const genOllama = ({ text, from, to, think, url, key, systemPrompt, userPrompt, model }) => {
   systemPrompt = systemPrompt
     .replaceAll(INPUT_PLACE_FROM, from)
     .replaceAll(INPUT_PLACE_TO, to)
@@ -328,6 +328,7 @@ const genOllama = ({ text, from, to, url, key, systemPrompt, userPrompt, model }
     model,
     system: systemPrompt,
     prompt: userPrompt,
+    think: think,
     stream: false,
   };
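
With this change the setting travels straight from the interface options into the request body that genOllama builds. A minimal sketch of the resulting payload when "disable thinking" is selected; only the field names come from the diff above, the model name and endpoint are assumptions:

// Sketch (not the plugin's actual code): payload genOllama would produce
// when the new "disable thinking" option is selected.
const body = {
  model: "qwen3",
  system: "You are a professional, authentic machine translation engine.",
  prompt: "Translate the following source text from auto to zh-CN. ...",
  think: false,   // new field: ask Ollama to skip the thinking phase
  stream: false,
};
// fetch(ollamaUrl, { method: "POST", body: JSON.stringify(body) });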

View File

@@ -186,8 +186,20 @@ export const I18N = {
     zh: `最大并发请求数量 (1-100)`,
     en: `Maximum Number Of Concurrent Requests (1-100)`,
   },
+  if_think: {
+    zh: `启用或禁用模型的深度思考能力`,
+    en: `Enable or disable the model's thinking behavior`,
+  },
+  think: {
+    zh: `启用深度思考`,
+    en: `Enable thinking`,
+  },
+  nothink: {
+    zh: `禁用深度思考`,
+    en: `Disable thinking`,
+  },
   think_ignore: {
-    zh: `忽略以下模型的<think>输出,逗号(,)分割`,
+    zh: `忽略以下模型的<think>输出,逗号(,)分割,当模型支持思考但ollama不支持时需要填写本参数`,
     en: `Ignore the <think> block for the following models, comma (,) separated`,
   },
   fetch_interval: {

View File

@@ -543,6 +543,7 @@ const defaultOllamaApi = {
model: "llama3.1",
systemPrompt: `You are a professional, authentic machine translation engine.`,
userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
think:false,
thinkIgnore:`qwen3,deepseek-r1`,
fetchLimit: 1,
fetchInterval: 500,
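
For clarity on how the two defaults relate: think: false asks Ollama itself not to produce a thinking phase, while thinkIgnore remains the client-side fallback for models that still emit a <think> block the server cannot suppress. A rough sketch of such a fallback, assuming a simple regex strip (the helper name and body are hypothetical, not taken from the plugin):

// Hypothetical helper: drop the <think>...</think> block from a reply for
// models listed in thinkIgnore (e.g. "qwen3,deepseek-r1").
const stripThink = (text, model, thinkIgnore) => {
  const ignored = thinkIgnore.split(",").map((s) => s.trim());
  if (!ignored.some((name) => model.startsWith(name))) return text;
  return text.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
};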

View File

@@ -2,6 +2,7 @@ import Stack from "@mui/material/Stack";
 import TextField from "@mui/material/TextField";
 import Button from "@mui/material/Button";
 import LoadingButton from "@mui/lab/LoadingButton";
+import MenuItem from "@mui/material/MenuItem";
 import {
   OPT_TRANS_ALL,
   OPT_TRANS_MICROSOFT,
@@ -117,6 +118,7 @@ function ApiFields({ translator }) {
model = "",
systemPrompt = "",
userPrompt = "",
think="",
thinkIgnore = "",
fetchLimit = DEFAULT_FETCH_LIMIT,
fetchInterval = DEFAULT_FETCH_INTERVAL,
@@ -249,6 +251,17 @@ function ApiFields({ translator }) {
       {(translator.startsWith(OPT_TRANS_OLLAMA)) && (
         <>
           <TextField
+            select
+            size="small"
+            name="think"
+            value={think}
+            label={i18n("if_think")}
+            onChange={handleChange}
+          >
+            <MenuItem value={false}>{i18n("nothink")}</MenuItem>
+            <MenuItem value={true}>{i18n("think")}</MenuItem>
+          </TextField>
+          <TextField
             size="small"
             label={i18n("think_ignore")}
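
The two MenuItem entries carry boolean values, and MUI's select reports the chosen MenuItem's value prop through event.target.value, so the stored setting stays a real boolean rather than a string. A minimal sketch of the handler this field relies on; the handler name matches the diff, but its body and the setApi setter are assumptions:

// Assumed shape of handleChange (not taken from the diff): write the field
// back into the translator's API settings, keyed by the input's name.
const handleChange = (event) => {
  const { name, value } = event.target; // e.g. name === "think", value === false
  setApi((prev) => ({ ...prev, [name]: value })); // setApi is a hypothetical state setter
};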