feat: add gemini2 api
@@ -15,6 +15,7 @@ import {
   OPT_TRANS_OPENAI_2,
   OPT_TRANS_OPENAI_3,
   OPT_TRANS_GEMINI,
+  OPT_TRANS_GEMINI_2,
   OPT_TRANS_CLAUDE,
   OPT_TRANS_CLOUDFLAREAI,
   OPT_TRANS_OLLAMA,
@@ -262,7 +263,7 @@ export const apiTranslate = async ({
       isSame = to === res.src;
       break;
     case OPT_TRANS_GOOGLE_2:
-      trText = res?.[0]?.[0]||"";
+      trText = res?.[0]?.[0] || "";
       isSame = to === res.src;
       break;
     case OPT_TRANS_MICROSOFT:
@@ -313,6 +314,7 @@ export const apiTranslate = async ({
     case OPT_TRANS_OPENAI:
     case OPT_TRANS_OPENAI_2:
     case OPT_TRANS_OPENAI_3:
+    case OPT_TRANS_GEMINI_2:
       trText = res?.choices?.map((item) => item.message.content).join(" ");
       isSame = text === trText;
       break;
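Note: since Gemini2 is served through Google's OpenAI-compatible endpoint, its response is parsed with the same choices[].message.content path as the OpenAI cases. A small illustrative sketch (the response values below are made up):

    // Hypothetical OpenAI-style response body, for illustration only.
    const res = {
      model: "gemini-2.0-flash",
      choices: [{ message: { role: "assistant", content: "Bonjour le monde" } }],
    };
    const trText = res?.choices?.map((item) => item.message.content).join(" ");
    // trText === "Bonjour le monde"; isSame then compares it against the source text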
@@ -334,10 +336,10 @@ export const apiTranslate = async ({
     case OPT_TRANS_OLLAMA_2:
     case OPT_TRANS_OLLAMA_3:
       const { thinkIgnore = "" } = apiSetting;
-      const deepModels = thinkIgnore.split(',').filter(model => model.trim());
-      if (deepModels.some(model => res?.model?.startsWith(model))) {
-        trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, '');
-      }else{
+      const deepModels = thinkIgnore.split(",").filter((model) => model.trim());
+      if (deepModels.some((model) => res?.model?.startsWith(model))) {
+        trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, "");
+      } else {
         trText = res?.response;
       }
       isSame = text === trText;
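For context (this hunk only reformats existing code): thinkIgnore is a comma-separated list of model-name prefixes whose <think>…</think> reasoning output is stripped before the translation is used. A standalone sketch with assumed values:

    // Assumed settings and response values, for illustration only.
    const thinkIgnore = "deepseek-r1,qwq";
    const res = { model: "deepseek-r1:7b", response: "<think>...</think>Bonjour" };
    const deepModels = thinkIgnore.split(",").filter((model) => model.trim());
    const trText = deepModels.some((model) => res?.model?.startsWith(model))
      ? res?.response.replace(/<think>[\s\S]*<\/think>/i, "")
      : res?.response;
    // trText === "Bonjour"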
@@ -14,6 +14,7 @@ import {
   OPT_TRANS_OPENAI_2,
   OPT_TRANS_OPENAI_3,
   OPT_TRANS_GEMINI,
+  OPT_TRANS_GEMINI_2,
   OPT_TRANS_CLAUDE,
   OPT_TRANS_CLOUDFLAREAI,
   OPT_TRANS_OLLAMA,
@@ -337,6 +338,55 @@ const genGemini = ({
   return [url, init];
 };
 
+const genGemini2 = ({
+  text,
+  from,
+  to,
+  url,
+  key,
+  systemPrompt,
+  userPrompt,
+  model,
+  temperature,
+  reasoningEffort,
+}) => {
+  systemPrompt = systemPrompt
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to)
+    .replaceAll(INPUT_PLACE_TEXT, text);
+  userPrompt = userPrompt
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to)
+    .replaceAll(INPUT_PLACE_TEXT, text);
+
+  const data = {
+    model,
+    reasoning_effort: reasoningEffort,
+    messages: [
+      {
+        role: "system",
+        content: systemPrompt,
+      },
+      {
+        role: "user",
+        content: userPrompt,
+      },
+    ],
+    temperature,
+  };
+
+  const init = {
+    headers: {
+      "Content-type": "application/json",
+      Authorization: `Bearer ${key}`,
+    },
+    method: "POST",
+    body: JSON.stringify(data),
+  };
+
+  return [url, init];
+};
+
 const genClaude = ({
   text,
   from,
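genGemini2 returns the same [url, init] pair shape as the other chat-style builders, so the caller can hand it straight to fetch. A minimal usage sketch, not part of the commit: the values mirror the Gemini2 defaults added below, the key is a placeholder, and INPUT_PLACE_* are the project's existing prompt tokens.

    // Hedged usage sketch (assumes an async context and the imports already in this file).
    const [url, init] = genGemini2({
      text: "Hello world",
      from: "English",
      to: "Simplified Chinese",
      url: "https://generativelanguage.googleapis.com/v1beta/openai/chat/completions",
      key: "YOUR_API_KEY", // placeholder, not a real key
      systemPrompt: "You are a professional, authentic machine translation engine.",
      userPrompt: `Translate from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}: ${INPUT_PLACE_TEXT}`,
      model: "gemini-2.0-flash",
      temperature: 0,
      reasoningEffort: "low",
    });
    const res = await fetch(url, init).then((r) => r.json());
    // res.choices[0].message.content holds the translation (OpenAI-compatible shape).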
@@ -493,6 +543,7 @@ export const genTransReq = ({ translator, text, from, to }, apiSetting) => {
     case OPT_TRANS_OPENAI_2:
     case OPT_TRANS_OPENAI_3:
     case OPT_TRANS_GEMINI:
+    case OPT_TRANS_GEMINI_2:
     case OPT_TRANS_CLAUDE:
     case OPT_TRANS_CLOUDFLAREAI:
     case OPT_TRANS_OLLAMA:
@@ -539,6 +590,8 @@ export const genTransReq = ({ translator, text, from, to }, apiSetting) => {
       return genOpenAI(args);
     case OPT_TRANS_GEMINI:
       return genGemini(args);
+    case OPT_TRANS_GEMINI_2:
+      return genGemini2(args);
     case OPT_TRANS_CLAUDE:
       return genClaude(args);
     case OPT_TRANS_CLOUDFLAREAI:
@@ -130,6 +130,7 @@ export const OPT_TRANS_OPENAI = "OpenAI";
 export const OPT_TRANS_OPENAI_2 = "OpenAI2";
 export const OPT_TRANS_OPENAI_3 = "OpenAI3";
 export const OPT_TRANS_GEMINI = "Gemini";
+export const OPT_TRANS_GEMINI_2 = "Gemini2";
 export const OPT_TRANS_CLAUDE = "Claude";
 export const OPT_TRANS_CLOUDFLAREAI = "CloudflareAI";
 export const OPT_TRANS_OLLAMA = "Ollama";
@@ -155,6 +156,7 @@ export const OPT_TRANS_ALL = [
   OPT_TRANS_OPENAI_2,
   OPT_TRANS_OPENAI_3,
   OPT_TRANS_GEMINI,
+  OPT_TRANS_GEMINI_2,
   OPT_TRANS_CLAUDE,
   OPT_TRANS_CLOUDFLAREAI,
   OPT_TRANS_OLLAMA,
@@ -314,6 +316,9 @@ export const OPT_LANGS_SPECIAL = {
   [OPT_TRANS_GEMINI]: new Map(
     OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
   ),
+  [OPT_TRANS_GEMINI_2]: new Map(
+    OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
+  ),
   [OPT_TRANS_CLAUDE]: new Map(
     OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
   ),
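The new OPT_LANGS_SPECIAL entry gives Gemini2 the same language labels as Gemini: each OPT_LANGS_FROM label is cut at " - " and only the leading name is kept. A tiny sketch with an assumed label format:

    // Assumed OPT_LANGS_FROM entry shape: [langCode, "English name - native name"].
    const sampleLangs = [["zh-CN", "Simplified Chinese - 简体中文"]];
    const gemini2Langs = new Map(
      sampleLangs.map(([key, val]) => [key, val.split(" - ")[0]])
    );
    // gemini2Langs.get("zh-CN") === "Simplified Chinese"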
@@ -680,6 +685,20 @@ export const DEFAULT_TRANS_APIS = {
     isDisabled: false,
     httpTimeout: DEFAULT_HTTP_TIMEOUT * 2,
   },
+  [OPT_TRANS_GEMINI_2]: {
+    url: `https://generativelanguage.googleapis.com/v1beta/openai/chat/completions`,
+    key: "",
+    model: "gemini-2.0-flash",
+    systemPrompt: `You are a professional, authentic machine translation engine.`,
+    userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
+    temperature: 0,
+    reasoningEffort: "low",
+    fetchLimit: 1,
+    fetchInterval: 500,
+    apiName: OPT_TRANS_GEMINI_2,
+    isDisabled: false,
+    httpTimeout: DEFAULT_HTTP_TIMEOUT * 2,
+  },
   [OPT_TRANS_CLAUDE]: {
     url: "https://api.anthropic.com/v1/messages",
     key: "",
@@ -18,6 +18,7 @@ import {
   OPT_TRANS_OPENAI_2,
   OPT_TRANS_OPENAI_3,
   OPT_TRANS_GEMINI,
+  OPT_TRANS_GEMINI_2,
   OPT_TRANS_CLAUDE,
   OPT_TRANS_CLOUDFLAREAI,
   OPT_TRANS_OLLAMA,
@@ -139,6 +140,7 @@ function ApiFields({ translator }) {
     maxTokens = 256,
     apiName = "",
     isDisabled = false,
+    reasoningEffort = "low",
   } = api;
 
   const handleChange = (e) => {
@@ -180,6 +182,7 @@ function ApiFields({ translator }) {
     OPT_TRANS_OPENAI_2,
     OPT_TRANS_OPENAI_3,
     OPT_TRANS_GEMINI,
+    OPT_TRANS_GEMINI_2,
     OPT_TRANS_CLAUDE,
     OPT_TRANS_CLOUDFLAREAI,
     OPT_TRANS_OLLAMA,
@@ -247,7 +250,7 @@ function ApiFields({ translator }) {
       {(translator.startsWith(OPT_TRANS_OPENAI) ||
         translator.startsWith(OPT_TRANS_OLLAMA) ||
         translator === OPT_TRANS_CLAUDE ||
-        translator === OPT_TRANS_GEMINI) && (
+        translator.startsWith(OPT_TRANS_GEMINI)) && (
         <>
           <TextField
             size="small"
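The loosened condition works because the two identifiers share a prefix: OPT_TRANS_GEMINI is "Gemini" and OPT_TRANS_GEMINI_2 is "Gemini2", so startsWith matches both translators where the old strict equality matched only the first.

    // Both Gemini translators now pass the prefix check that gates these fields.
    ["Gemini", "Gemini2"].every((t) => t.startsWith("Gemini")); // true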
@@ -323,6 +326,32 @@ function ApiFields({ translator }) {
         </>
       )}
 
+      {translator === OPT_TRANS_GEMINI_2 && (
+        <>
+          <TextField
+            size="small"
+            label="Temperature"
+            type="number"
+            name="temperature"
+            value={temperature}
+            onChange={handleChange}
+          />
+          <TextField
+            select
+            size="small"
+            name="reasoningEffort"
+            value={reasoningEffort}
+            label="Reasoning Effort"
+            onChange={handleChange}
+          >
+            <MenuItem value={"none"}>none</MenuItem>
+            <MenuItem value={"low"}>low</MenuItem>
+            <MenuItem value={"medium"}>medium</MenuItem>
+            <MenuItem value={"high"}>high</MenuItem>
+          </TextField>
+        </>
+      )}
+
       {translator === OPT_TRANS_NIUTRANS && (
         <>
           <TextField