Merge pull request #272 from FlyLoongZ/master

添加AI API 接口参数自定义(Body和Header)
This commit is contained in:
Gabe
2025-08-11 22:47:33 +08:00
committed by GitHub
4 changed files with 83 additions and 0 deletions

View File

@@ -240,6 +240,8 @@ const genOpenAI = ({
model, model,
temperature, temperature,
maxTokens, maxTokens,
customHeader,
customBody,
}) => { }) => {
// 兼容历史上作为systemPrompt的prompt:如果prompt中不包含待翻译文本,则添加文本到prompt末尾
// if (!prompt.includes(INPUT_PLACE_TEXT)) { // if (!prompt.includes(INPUT_PLACE_TEXT)) {
@@ -254,6 +256,9 @@ const genOpenAI = ({
.replaceAll(INPUT_PLACE_TO, to) .replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text); .replaceAll(INPUT_PLACE_TEXT, text);
customHeader = JSON.parse("{" + customHeader + "}");
customBody = JSON.parse("{" + customBody + "}");
const data = { const data = {
model, model,
messages: [ messages: [
@@ -268,6 +273,7 @@ const genOpenAI = ({
], ],
temperature, temperature,
max_completion_tokens: maxTokens, max_completion_tokens: maxTokens,
...customBody,
}; };
const init = { const init = {
@@ -275,6 +281,7 @@ const genOpenAI = ({
"Content-type": "application/json", "Content-type": "application/json",
Authorization: `Bearer ${key}`, // OpenAI Authorization: `Bearer ${key}`, // OpenAI
"api-key": key, // Azure OpenAI "api-key": key, // Azure OpenAI
...customHeader,
}, },
method: "POST", method: "POST",
body: JSON.stringify(data), body: JSON.stringify(data),
@@ -294,6 +301,8 @@ const genGemini = ({
model, model,
temperature, temperature,
maxTokens, maxTokens,
customHeader,
customBody,
}) => { }) => {
url = url url = url
.replaceAll(INPUT_PLACE_MODEL, model) .replaceAll(INPUT_PLACE_MODEL, model)
@@ -307,6 +316,9 @@ const genGemini = ({
.replaceAll(INPUT_PLACE_TO, to) .replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text); .replaceAll(INPUT_PLACE_TEXT, text);
customHeader = JSON.parse("{" + customHeader + "}");
customBody = JSON.parse("{" + customBody + "}");
const data = { const data = {
system_instruction: { system_instruction: {
parts: { parts: {
@@ -325,11 +337,13 @@ const genGemini = ({
// topP: 0.8, // topP: 0.8,
// topK: 10, // topK: 10,
}, },
...customBody,
}; };
const init = { const init = {
headers: { headers: {
"Content-type": "application/json", "Content-type": "application/json",
...customHeader,
}, },
method: "POST", method: "POST",
body: JSON.stringify(data), body: JSON.stringify(data),
@@ -349,6 +363,8 @@ const genGemini2 = ({
model, model,
temperature, temperature,
maxTokens, maxTokens,
customHeader,
customBody,
}) => { }) => {
systemPrompt = systemPrompt systemPrompt = systemPrompt
.replaceAll(INPUT_PLACE_FROM, from) .replaceAll(INPUT_PLACE_FROM, from)
@@ -359,6 +375,9 @@ const genGemini2 = ({
.replaceAll(INPUT_PLACE_TO, to) .replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text); .replaceAll(INPUT_PLACE_TEXT, text);
customHeader = JSON.parse("{" + customHeader + "}");
customBody = JSON.parse("{" + customBody + "}");
const data = { const data = {
model, model,
messages: [ messages: [
@@ -373,12 +392,14 @@ const genGemini2 = ({
], ],
temperature, temperature,
max_tokens: maxTokens, max_tokens: maxTokens,
...customBody,
}; };
const init = { const init = {
headers: { headers: {
"Content-type": "application/json", "Content-type": "application/json",
Authorization: `Bearer ${key}`, Authorization: `Bearer ${key}`,
...customHeader,
}, },
method: "POST", method: "POST",
body: JSON.stringify(data), body: JSON.stringify(data),
@@ -398,6 +419,8 @@ const genClaude = ({
model, model,
temperature, temperature,
maxTokens, maxTokens,
customHeader,
customBody,
}) => { }) => {
systemPrompt = systemPrompt systemPrompt = systemPrompt
.replaceAll(INPUT_PLACE_FROM, from) .replaceAll(INPUT_PLACE_FROM, from)
@@ -408,6 +431,9 @@ const genClaude = ({
.replaceAll(INPUT_PLACE_TO, to) .replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text); .replaceAll(INPUT_PLACE_TEXT, text);
customHeader = JSON.parse("{" + customHeader + "}");
customBody = JSON.parse("{" + customBody + "}");
const data = { const data = {
model, model,
system: systemPrompt, system: systemPrompt,
@@ -419,6 +445,7 @@ const genClaude = ({
], ],
temperature, temperature,
max_tokens: maxTokens, max_tokens: maxTokens,
...customBody,
}; };
const init = { const init = {
@@ -426,6 +453,7 @@ const genClaude = ({
"Content-type": "application/json", "Content-type": "application/json",
"anthropic-version": "2023-06-01", "anthropic-version": "2023-06-01",
"x-api-key": key, "x-api-key": key,
...customHeader,
}, },
method: "POST", method: "POST",
body: JSON.stringify(data), body: JSON.stringify(data),
@@ -444,6 +472,8 @@ const genOllama = ({
systemPrompt, systemPrompt,
userPrompt, userPrompt,
model, model,
customHeader,
customBody,
}) => { }) => {
systemPrompt = systemPrompt systemPrompt = systemPrompt
.replaceAll(INPUT_PLACE_FROM, from) .replaceAll(INPUT_PLACE_FROM, from)
@@ -454,17 +484,22 @@ const genOllama = ({
.replaceAll(INPUT_PLACE_TO, to) .replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text); .replaceAll(INPUT_PLACE_TEXT, text);
customHeader = JSON.parse("{" + customHeader + "}");
customBody = JSON.parse("{" + customBody + "}");
const data = { const data = {
model, model,
system: systemPrompt, system: systemPrompt,
prompt: userPrompt, prompt: userPrompt,
think: think, think: think,
stream: false, stream: false,
...customBody,
}; };
const init = { const init = {
headers: { headers: {
"Content-type": "application/json", "Content-type": "application/json",
...customHeader,
}, },
method: "POST", method: "POST",
body: JSON.stringify(data), body: JSON.stringify(data),

View File

@@ -228,6 +228,22 @@ export const I18N = {
zh: `请求超时时间 (5000-30000ms)`, zh: `请求超时时间 (5000-30000ms)`,
en: `Request Timeout Time (5000-30000ms)`, en: `Request Timeout Time (5000-30000ms)`,
}, },
custom_header: {
zh: `自定义Header参数`,
en: `Custom Header Params`,
},
custom_header_help: {
zh: `使用JSON格式例如 "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:141.0) Gecko/20100101 Firefox/141.0"`,
en: `Use JSON format, for example "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:141.0) Gecko/20100101 Firefox/141.0"`,
},
custom_body: {
zh: `自定义Body参数`,
en: `Custom Body Params`,
},
custom_body_help: {
zh: `使用JSON格式例如 "top_p": 0.7`,
en: `Use JSON format, for example "top_p": 0.7`,
},
min_translate_length: { min_translate_length: {
zh: `最小翻译字符数 (1-100)`, zh: `最小翻译字符数 (1-100)`,
en: `Minimum number Of Translated Characters (1-100)`, en: `Minimum number Of Translated Characters (1-100)`,

View File

@@ -563,6 +563,8 @@ const defaultOpenaiApi = {
model: "gpt-4", model: "gpt-4",
systemPrompt: `You are a professional, authentic machine translation engine.`, systemPrompt: `You are a professional, authentic machine translation engine.`,
userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`, userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
customHeader: "",
customBody: "",
temperature: 0, temperature: 0,
maxTokens: 256, maxTokens: 256,
fetchLimit: 1, fetchLimit: 1,
@@ -577,6 +579,8 @@ const defaultOllamaApi = {
model: "llama3.1", model: "llama3.1",
systemPrompt: `You are a professional, authentic machine translation engine.`, systemPrompt: `You are a professional, authentic machine translation engine.`,
userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`, userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
customHeader: "",
customBody: "",
think: false, think: false,
thinkIgnore: `qwen3,deepseek-r1`, thinkIgnore: `qwen3,deepseek-r1`,
fetchLimit: 1, fetchLimit: 1,
@@ -677,6 +681,8 @@ export const DEFAULT_TRANS_APIS = {
model: "gemini-2.5-flash", model: "gemini-2.5-flash",
systemPrompt: `You are a professional, authentic machine translation engine.`, systemPrompt: `You are a professional, authentic machine translation engine.`,
userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`, userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
customHeader: "",
customBody: "",
temperature: 0, temperature: 0,
maxTokens: 2048, maxTokens: 2048,
fetchLimit: 1, fetchLimit: 1,
@@ -691,6 +697,8 @@ export const DEFAULT_TRANS_APIS = {
model: "gemini-2.0-flash", model: "gemini-2.0-flash",
systemPrompt: `You are a professional, authentic machine translation engine.`, systemPrompt: `You are a professional, authentic machine translation engine.`,
userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`, userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
customHeader: "",
customBody: "",
temperature: 0, temperature: 0,
maxTokens: 2048, maxTokens: 2048,
fetchLimit: 1, fetchLimit: 1,
@@ -705,6 +713,8 @@ export const DEFAULT_TRANS_APIS = {
model: "claude-3-haiku-20240307", model: "claude-3-haiku-20240307",
systemPrompt: `You are a professional, authentic machine translation engine.`, systemPrompt: `You are a professional, authentic machine translation engine.`,
userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`, userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
customHeader: "",
customBody: "",
temperature: 0, temperature: 0,
maxTokens: 1024, maxTokens: 1024,
fetchLimit: 1, fetchLimit: 1,

View File

@@ -125,6 +125,8 @@ function ApiFields({ translator, api, updateApi, resetApi }) {
model = "", model = "",
systemPrompt = "", systemPrompt = "",
userPrompt = "", userPrompt = "",
customHeader = "",
customBody = "",
think = false, think = false,
thinkIgnore = "", thinkIgnore = "",
fetchLimit = DEFAULT_FETCH_LIMIT, fetchLimit = DEFAULT_FETCH_LIMIT,
@@ -274,6 +276,26 @@ function ApiFields({ translator, api, updateApi, resetApi }) {
multiline multiline
maxRows={10} maxRows={10}
/> />
<TextField
size="small"
label={i18n("custom_header")}
name="customHeader"
value={customHeader}
onChange={handleChange}
multiline
maxRows={10}
helperText={i18n("custom_header_help")}
/>
<TextField
size="small"
label={i18n("custom_body")}
name="customBody"
value={customBody}
onChange={handleChange}
multiline
maxRows={10}
helperText={i18n("custom_body_help")}
/>
</> </>
)} )}