feat: add gemini2 api

This commit is contained in:
Gabe
2025-07-02 21:54:18 +08:00
parent 94bf5f9580
commit b2a1309caa
4 changed files with 109 additions and 6 deletions

View File

@@ -15,6 +15,7 @@ import {
OPT_TRANS_OPENAI_2, OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3, OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI, OPT_TRANS_GEMINI,
OPT_TRANS_GEMINI_2,
OPT_TRANS_CLAUDE, OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI, OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA, OPT_TRANS_OLLAMA,
@@ -313,6 +314,7 @@ export const apiTranslate = async ({
case OPT_TRANS_OPENAI: case OPT_TRANS_OPENAI:
case OPT_TRANS_OPENAI_2: case OPT_TRANS_OPENAI_2:
case OPT_TRANS_OPENAI_3: case OPT_TRANS_OPENAI_3:
case OPT_TRANS_GEMINI_2:
trText = res?.choices?.map((item) => item.message.content).join(" "); trText = res?.choices?.map((item) => item.message.content).join(" ");
isSame = text === trText; isSame = text === trText;
break; break;
@@ -334,9 +336,9 @@ export const apiTranslate = async ({
case OPT_TRANS_OLLAMA_2: case OPT_TRANS_OLLAMA_2:
case OPT_TRANS_OLLAMA_3: case OPT_TRANS_OLLAMA_3:
const { thinkIgnore = "" } = apiSetting; const { thinkIgnore = "" } = apiSetting;
const deepModels = thinkIgnore.split(',').filter(model => model.trim()); const deepModels = thinkIgnore.split(",").filter((model) => model.trim());
if (deepModels.some(model => res?.model?.startsWith(model))) { if (deepModels.some((model) => res?.model?.startsWith(model))) {
trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, ''); trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, "");
} else { } else {
trText = res?.response; trText = res?.response;
} }

View File

@@ -14,6 +14,7 @@ import {
OPT_TRANS_OPENAI_2, OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3, OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI, OPT_TRANS_GEMINI,
OPT_TRANS_GEMINI_2,
OPT_TRANS_CLAUDE, OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI, OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA, OPT_TRANS_OLLAMA,
@@ -337,6 +338,55 @@ const genGemini = ({
return [url, init]; return [url, init];
}; };
/**
 * Build the `[url, init]` pair for a Gemini request sent through the
 * OpenAI-compatible `chat/completions` endpoint (OPT_TRANS_GEMINI_2).
 *
 * @param {Object} args
 * @param {string} args.text - Source text to translate.
 * @param {string} args.from - Source language (fills INPUT_PLACE_FROM).
 * @param {string} args.to - Target language (fills INPUT_PLACE_TO).
 * @param {string} args.url - Endpoint URL, returned unchanged.
 * @param {string} args.key - API key, sent as a Bearer token.
 * @param {string} args.systemPrompt - System prompt template with placeholders.
 * @param {string} args.userPrompt - User prompt template with placeholders.
 * @param {string} args.model - Model identifier (e.g. "gemini-2.0-flash").
 * @param {number} args.temperature - Sampling temperature.
 * @param {string} args.reasoningEffort - Sent as `reasoning_effort`
 *   ("none" | "low" | "medium" | "high").
 * @returns {[string, Object]} Tuple of the request URL and fetch init object.
 */
const genGemini2 = ({
  text,
  from,
  to,
  url,
  key,
  systemPrompt,
  userPrompt,
  model,
  temperature,
  reasoningEffort,
}) => {
  // Substitute the shared placeholder tokens into a prompt template.
  // Avoids reassigning the destructured parameters and removes the
  // duplicated replaceAll chains.
  const fillPrompt = (template) =>
    template
      .replaceAll(INPUT_PLACE_FROM, from)
      .replaceAll(INPUT_PLACE_TO, to)
      .replaceAll(INPUT_PLACE_TEXT, text);

  const data = {
    model,
    // OpenAI-compatible field name expected by the endpoint.
    reasoning_effort: reasoningEffort,
    messages: [
      {
        role: "system",
        content: fillPrompt(systemPrompt),
      },
      {
        role: "user",
        content: fillPrompt(userPrompt),
      },
    ],
    temperature,
  };

  const init = {
    headers: {
      "Content-type": "application/json",
      Authorization: `Bearer ${key}`,
    },
    method: "POST",
    body: JSON.stringify(data),
  };

  return [url, init];
};
const genClaude = ({ const genClaude = ({
text, text,
from, from,
@@ -493,6 +543,7 @@ export const genTransReq = ({ translator, text, from, to }, apiSetting) => {
case OPT_TRANS_OPENAI_2: case OPT_TRANS_OPENAI_2:
case OPT_TRANS_OPENAI_3: case OPT_TRANS_OPENAI_3:
case OPT_TRANS_GEMINI: case OPT_TRANS_GEMINI:
case OPT_TRANS_GEMINI_2:
case OPT_TRANS_CLAUDE: case OPT_TRANS_CLAUDE:
case OPT_TRANS_CLOUDFLAREAI: case OPT_TRANS_CLOUDFLAREAI:
case OPT_TRANS_OLLAMA: case OPT_TRANS_OLLAMA:
@@ -539,6 +590,8 @@ export const genTransReq = ({ translator, text, from, to }, apiSetting) => {
return genOpenAI(args); return genOpenAI(args);
case OPT_TRANS_GEMINI: case OPT_TRANS_GEMINI:
return genGemini(args); return genGemini(args);
case OPT_TRANS_GEMINI_2:
return genGemini2(args);
case OPT_TRANS_CLAUDE: case OPT_TRANS_CLAUDE:
return genClaude(args); return genClaude(args);
case OPT_TRANS_CLOUDFLAREAI: case OPT_TRANS_CLOUDFLAREAI:

View File

@@ -130,6 +130,7 @@ export const OPT_TRANS_OPENAI = "OpenAI";
export const OPT_TRANS_OPENAI_2 = "OpenAI2"; export const OPT_TRANS_OPENAI_2 = "OpenAI2";
export const OPT_TRANS_OPENAI_3 = "OpenAI3"; export const OPT_TRANS_OPENAI_3 = "OpenAI3";
export const OPT_TRANS_GEMINI = "Gemini"; export const OPT_TRANS_GEMINI = "Gemini";
export const OPT_TRANS_GEMINI_2 = "Gemini2";
export const OPT_TRANS_CLAUDE = "Claude"; export const OPT_TRANS_CLAUDE = "Claude";
export const OPT_TRANS_CLOUDFLAREAI = "CloudflareAI"; export const OPT_TRANS_CLOUDFLAREAI = "CloudflareAI";
export const OPT_TRANS_OLLAMA = "Ollama"; export const OPT_TRANS_OLLAMA = "Ollama";
@@ -155,6 +156,7 @@ export const OPT_TRANS_ALL = [
OPT_TRANS_OPENAI_2, OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3, OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI, OPT_TRANS_GEMINI,
OPT_TRANS_GEMINI_2,
OPT_TRANS_CLAUDE, OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI, OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA, OPT_TRANS_OLLAMA,
@@ -314,6 +316,9 @@ export const OPT_LANGS_SPECIAL = {
[OPT_TRANS_GEMINI]: new Map( [OPT_TRANS_GEMINI]: new Map(
OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]]) OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
), ),
[OPT_TRANS_GEMINI_2]: new Map(
OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
),
[OPT_TRANS_CLAUDE]: new Map( [OPT_TRANS_CLAUDE]: new Map(
OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]]) OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
), ),
@@ -680,6 +685,20 @@ export const DEFAULT_TRANS_APIS = {
isDisabled: false, isDisabled: false,
httpTimeout: DEFAULT_HTTP_TIMEOUT * 2, httpTimeout: DEFAULT_HTTP_TIMEOUT * 2,
}, },
[OPT_TRANS_GEMINI_2]: {
url: `https://generativelanguage.googleapis.com/v1beta/openai/chat/completions`,
key: "",
model: "gemini-2.0-flash",
systemPrompt: `You are a professional, authentic machine translation engine.`,
userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
temperature: 0,
reasoningEffort: "low",
fetchLimit: 1,
fetchInterval: 500,
apiName: OPT_TRANS_GEMINI_2,
isDisabled: false,
httpTimeout: DEFAULT_HTTP_TIMEOUT * 2,
},
[OPT_TRANS_CLAUDE]: { [OPT_TRANS_CLAUDE]: {
url: "https://api.anthropic.com/v1/messages", url: "https://api.anthropic.com/v1/messages",
key: "", key: "",

View File

@@ -18,6 +18,7 @@ import {
OPT_TRANS_OPENAI_2, OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3, OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI, OPT_TRANS_GEMINI,
OPT_TRANS_GEMINI_2,
OPT_TRANS_CLAUDE, OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI, OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA, OPT_TRANS_OLLAMA,
@@ -139,6 +140,7 @@ function ApiFields({ translator }) {
maxTokens = 256, maxTokens = 256,
apiName = "", apiName = "",
isDisabled = false, isDisabled = false,
reasoningEffort = "low",
} = api; } = api;
const handleChange = (e) => { const handleChange = (e) => {
@@ -180,6 +182,7 @@ function ApiFields({ translator }) {
OPT_TRANS_OPENAI_2, OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3, OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI, OPT_TRANS_GEMINI,
OPT_TRANS_GEMINI_2,
OPT_TRANS_CLAUDE, OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI, OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA, OPT_TRANS_OLLAMA,
@@ -247,7 +250,7 @@ function ApiFields({ translator }) {
{(translator.startsWith(OPT_TRANS_OPENAI) || {(translator.startsWith(OPT_TRANS_OPENAI) ||
translator.startsWith(OPT_TRANS_OLLAMA) || translator.startsWith(OPT_TRANS_OLLAMA) ||
translator === OPT_TRANS_CLAUDE || translator === OPT_TRANS_CLAUDE ||
translator === OPT_TRANS_GEMINI) && ( translator.startsWith(OPT_TRANS_GEMINI)) && (
<> <>
<TextField <TextField
size="small" size="small"
@@ -323,6 +326,32 @@ function ApiFields({ translator }) {
</> </>
)} )}
{translator === OPT_TRANS_GEMINI_2 && (
<>
<TextField
size="small"
label="Temperature"
type="number"
name="temperature"
value={temperature}
onChange={handleChange}
/>
<TextField
select
size="small"
name="reasoningEffort"
value={reasoningEffort}
label="Reasoning Effort"
onChange={handleChange}
>
<MenuItem value={"none"}>none</MenuItem>
<MenuItem value={"low"}>low</MenuItem>
<MenuItem value={"medium"}>medium</MenuItem>
<MenuItem value={"high"}>high</MenuItem>
</TextField>
</>
)}
{translator === OPT_TRANS_NIUTRANS && ( {translator === OPT_TRANS_NIUTRANS && (
<> <>
<TextField <TextField