API settings: add a parameter to enable or disable deep thinking for Ollama
@@ -186,8 +186,20 @@ export const I18N = {
     zh: `最大并发请求数量 (1-100)`,
     en: `Maximum Number Of Concurrent Requests (1-100)`,
   },
+  if_think: {
+    zh: `启用或禁用模型的深度思考能力`,
+    en: `Enable or disable the model’s thinking behavior `,
+  },
+  think: {
+    zh: `启用深度思考`,
+    en: `enable thinking`,
+  },
+  nothink: {
+    zh: `禁用深度思考`,
+    en: `disable thinking`,
+  },
   think_ignore: {
-    zh: `忽略以下模型的<think>输出,逗号(,)分割`,
+    zh: `忽略以下模型的<think>输出,逗号(,)分割,当模型支持思考但ollama不支持时需要填写本参数`,
     en: `Ignore the <think> block for the following models, comma (,) separated`,
   },
   fetch_interval: {
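The updated zh string for think_ignore explains that the list is needed when a model supports thinking but the Ollama version in use cannot turn it off; in that case the <think> block has to be stripped from the response text. Below is a minimal sketch of how such a comma-separated list could be applied. The helper names and matching rules are hypothetical illustrations, not code from this repository.

// Hypothetical helpers showing how a comma-separated `thinkIgnore` list
// could be used to drop <think>...</think> blocks from model output.

// True when the configured model matches one of the ignored entries,
// e.g. "deepseek-r1:7b" matches the "deepseek-r1" entry (assumed rule).
function shouldStripThink(model: string, thinkIgnore: string): boolean {
  return thinkIgnore
    .split(",")
    .map((name) => name.trim())
    .filter(Boolean)
    .some((name) => model === name || model.startsWith(`${name}:`));
}

// Remove <think>...</think> blocks and surrounding whitespace from raw output.
function stripThinkBlock(text: string): string {
  return text.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
}

// Usage sketch with the default value introduced by this commit.
const thinkIgnore = `qwen3,deepseek-r1`;
const raw = "<think>reasoning…</think>\nBonjour le monde";
const output = shouldStripThink("qwen3:8b", thinkIgnore)
  ? stripThinkBlock(raw)
  : raw;
console.log(output); // "Bonjour le monde"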
@@ -543,6 +543,7 @@ const defaultOllamaApi = {
   model: "llama3.1",
   systemPrompt: `You are a professional, authentic machine translation engine.`,
   userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
+  think:false,
   thinkIgnore:`qwen3,deepseek-r1`,
   fetchLimit: 1,
   fetchInterval: 500,
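With the new default in place, the setting can be forwarded in the request body sent to Ollama; recent Ollama releases accept a top-level think field on the chat endpoint, while older builds cannot disable thinking, which is what the thinkIgnore fallback above covers. The sketch below assumes the endpoint URL, payload shape, and setting type; none of it is taken from this repository.

// Rough sketch: forwarding the new `think` setting to Ollama's /api/chat.
// Endpoint URL and payload shape are assumptions based on Ollama's public API.
interface OllamaSetting {
  url: string;          // e.g. "http://localhost:11434/api/chat" (assumed)
  model: string;        // "llama3.1"
  think: boolean;       // false disables deep thinking on supported models
  systemPrompt: string;
  userPrompt: string;
}

async function translateWithOllama(setting: OllamaSetting): Promise<string> {
  const res = await fetch(setting.url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model: setting.model,
      think: setting.think, // flag introduced by this commit's default config
      stream: false,
      messages: [
        { role: "system", content: setting.systemPrompt },
        { role: "user", content: setting.userPrompt },
      ],
    }),
  });
  if (!res.ok) throw new Error(`Ollama request failed: ${res.status}`);
  const data = await res.json();
  return data?.message?.content ?? "";
}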