feat: Support batch fetch, and update AI prompt

This commit is contained in:
Gabe
2025-09-03 00:37:35 +08:00
parent 2bfb27f346
commit d4e58fc925
14 changed files with 576 additions and 244 deletions

View File

@@ -1,11 +1,11 @@
import queryString from "query-string"; import queryString from "query-string";
import { DEFAULT_USER_AGENT } from "../config"; import { DEFAULT_USER_AGENT } from "../config";
export const genBaidu = async ({ text, from, to }) => { export const genBaidu = async ({ texts, from, to }) => {
const data = { const data = {
from, from,
to, to,
query: text, query: texts.join(" "),
source: "txt", source: "txt",
}; };

View File

@@ -1,6 +1,7 @@
let id = 1e4 * Math.round(1e4 * Math.random()); let id = 1e4 * Math.round(1e4 * Math.random());
export const genDeeplFree = ({ text, from, to }) => { export const genDeeplFree = ({ texts, from, to }) => {
const text = texts.join(" ");
const iCount = (text.match(/[i]/g) || []).length + 1; const iCount = (text.match(/[i]/g) || []).length + 1;
let timestamp = Date.now(); let timestamp = Date.now();
timestamp = timestamp + (iCount - (timestamp % iCount)); timestamp = timestamp + (iCount - (timestamp % iCount));

View File

@@ -7,12 +7,14 @@ import {
OPT_LANGS_TENCENT, OPT_LANGS_TENCENT,
OPT_LANGS_SPECIAL, OPT_LANGS_SPECIAL,
OPT_LANGS_MICROSOFT, OPT_LANGS_MICROSOFT,
OPT_TRANS_BATCH,
} from "../config"; } from "../config";
import { sha256 } from "../libs/utils"; import { sha256 } from "../libs/utils";
import { msAuth } from "../libs/auth"; import { msAuth } from "../libs/auth";
import { kissLog } from "../libs/log"; import { kissLog } from "../libs/log";
import { genTransReq, parseTransRes } from "./trans"; import { fetchTranslate } from "./trans";
import { getHttpCachePolyfill, putHttpCachePolyfill } from "../libs/cache"; import { getHttpCachePolyfill, putHttpCachePolyfill } from "../libs/cache";
import { getBatchQueue } from "../libs/batchQueue";
/** /**
* 同步数据 * 同步数据
@@ -203,13 +205,10 @@ export const apiTranslate = async ({
fromLang, fromLang,
toLang, toLang,
apiSetting = {}, apiSetting = {},
docInfo = {},
useCache = true, useCache = true,
usePool = true, usePool = true,
}) => { }) => {
let cacheInput; // 缓存URL
let resCache; // 缓存对象
let res; // 翻译接口返回的JSON数据
if (!text) { if (!text) {
return ["", false]; return ["", false];
} }
@@ -223,57 +222,59 @@ export const apiTranslate = async ({
return ["", false]; return ["", false];
} }
// 查询缓存数据 // TODO: 优化缓存失效因素
// TODO 优化缓存失效因素
if (useCache) {
const [v1, v2] = process.env.REACT_APP_VERSION.split("."); const [v1, v2] = process.env.REACT_APP_VERSION.split(".");
const cacheOpts = { const cacheOpts = {
translator, translator,
text, text,
fromLang, fromLang,
toLang, toLang,
userPrompt: apiSetting.userPrompt, // prompt改变缓存失效
model: apiSetting.model, // model改变缓存失效 model: apiSetting.model, // model改变缓存失效
version: [v1, v2].join("."), version: [v1, v2].join("."),
}; };
cacheInput = `${URL_CACHE_TRAN}?${queryString.stringify(cacheOpts)}`; const cacheInput = `${URL_CACHE_TRAN}?${queryString.stringify(cacheOpts)}`;
resCache = await getHttpCachePolyfill(cacheInput);
// 查询缓存数据
if (useCache) {
const cache = (await getHttpCachePolyfill(cacheInput)) || {};
if (cache.trText) {
return [cache.trText, cache.isSame];
}
} }
// 请求接口数据 // 请求接口数据
if (!resCache) { let trText = "";
const [input, init] = await genTransReq(translator, { let srLang = "";
text, if (apiSetting.isBatchFetch && OPT_TRANS_BATCH.has(translator)) {
from, const queue = getBatchQueue(
to, { translator, from, to, docInfo, apiSetting, usePool },
...apiSetting, apiSetting
}); );
res = await fetchData(input, init, { const tranlation = await queue.addTask({ text });
useCache: false, if (Array.isArray(tranlation)) {
usePool, [trText, srLang = ""] = tranlation;
fetchInterval: apiSetting.fetchInterval, }
fetchLimit: apiSetting.fetchLimit,
httpTimeout: apiSetting.httpTimeout,
});
} else { } else {
res = resCache; const translations = await fetchTranslate({
} translator,
texts: [text],
if (!res) {
return ["", false];
}
// 解析返回数据
const [trText, isSame] = parseTransRes(translator, res, apiSetting, {
text,
from, from,
to, to,
docInfo,
apiSetting,
usePool,
}); });
if (Array.isArray(translations?.[0])) {
[trText, srLang = ""] = translations[0];
}
}
const isSame = srLang && (to.includes(srLang) || srLang.includes(to));
// 插入缓存 // 插入缓存
if (useCache && !resCache && trText) { if (useCache && trText) {
await putHttpCachePolyfill(cacheInput, null, res); await putHttpCachePolyfill(cacheInput, null, { trText, isSame, srLang });
} }
return [trText, isSame, res]; return [trText, isSame];
}; };

View File

@@ -26,7 +26,6 @@ import {
OPT_TRANS_CUSTOMIZE_3, OPT_TRANS_CUSTOMIZE_3,
OPT_TRANS_CUSTOMIZE_4, OPT_TRANS_CUSTOMIZE_4,
OPT_TRANS_CUSTOMIZE_5, OPT_TRANS_CUSTOMIZE_5,
INPUT_PLACE_URL,
INPUT_PLACE_FROM, INPUT_PLACE_FROM,
INPUT_PLACE_TO, INPUT_PLACE_TO,
INPUT_PLACE_TEXT, INPUT_PLACE_TEXT,
@@ -37,7 +36,9 @@ import { msAuth } from "../libs/auth";
import { genDeeplFree } from "./deepl"; import { genDeeplFree } from "./deepl";
import { genBaidu } from "./baidu"; import { genBaidu } from "./baidu";
import interpreter from "../libs/interpreter"; import interpreter from "../libs/interpreter";
import { parseJsonObj } from "../libs/utils"; import { parseJsonObj, extractJson } from "../libs/utils";
import { kissLog } from "../libs/log";
import { fetchData } from "../libs/fetch";
const keyMap = new Map(); const keyMap = new Map();
const urlMap = new Map(); const urlMap = new Map();
@@ -60,7 +61,48 @@ const keyPick = (translator, key = "", cacheMap) => {
return keys[curIndex]; return keys[curIndex];
}; };
const genGoogle = ({ text, from, to, url, key }) => { const genSystemPrompt = ({ systemPrompt, from, to }) =>
systemPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to);
const genUserPrompt = ({ userPrompt, from, to, texts, docInfo }) => {
const prompt = JSON.stringify({
targetLanguage: to,
title: docInfo.title,
description: docInfo.description,
segments: texts.map((text, i) => ({ id: i, text })),
});
if (userPrompt.includes(INPUT_PLACE_TEXT)) {
return userPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, prompt);
}
return prompt;
};
const parseTranslations = (raw) => {
let data;
try {
const jsonString = extractJson(raw);
data = JSON.parse(jsonString);
} catch (err) {
kissLog(err, "parseTranslations");
data = { translations: [] };
}
if (!Array.isArray(data.translations)) {
data.translations = [];
}
return data.translations.map((item) => [item.text]);
};
const genGoogle = ({ texts, from, to, url, key }) => {
const params = { const params = {
client: "gtx", client: "gtx",
dt: "t", dt: "t",
@@ -68,7 +110,7 @@ const genGoogle = ({ text, from, to, url, key }) => {
ie: "UTF-8", ie: "UTF-8",
sl: from, sl: from,
tl: to, tl: to,
q: text, q: texts.join(" "),
}; };
const input = `${url}?${queryString.stringify(params)}`; const input = `${url}?${queryString.stringify(params)}`;
const init = { const init = {
@@ -83,8 +125,8 @@ const genGoogle = ({ text, from, to, url, key }) => {
return [input, init]; return [input, init];
}; };
const genGoogle2 = ({ text, from, to, url, key }) => { const genGoogle2 = ({ texts, from, to, url, key }) => {
const body = JSON.stringify([[[text], from, to], "wt_lib"]); const body = JSON.stringify([[texts, from, to], "wt_lib"]);
const init = { const init = {
method: "POST", method: "POST",
headers: { headers: {
@@ -97,7 +139,7 @@ const genGoogle2 = ({ text, from, to, url, key }) => {
return [url, init]; return [url, init];
}; };
const genMicrosoft = async ({ text, from, to }) => { const genMicrosoft = async ({ texts, from, to }) => {
const [token] = await msAuth(); const [token] = await msAuth();
const params = { const params = {
from, from,
@@ -111,15 +153,15 @@ const genMicrosoft = async ({ text, from, to }) => {
Authorization: `Bearer ${token}`, Authorization: `Bearer ${token}`,
}, },
method: "POST", method: "POST",
body: JSON.stringify([{ Text: text }]), body: JSON.stringify(texts.map((text) => ({ Text: text }))),
}; };
return [input, init]; return [input, init];
}; };
const genDeepl = ({ text, from, to, url, key }) => { const genDeepl = ({ texts, from, to, url, key }) => {
const data = { const data = {
text: [text], text: texts,
target_lang: to, target_lang: to,
source_lang: from, source_lang: from,
// split_sentences: "0", // split_sentences: "0",
@@ -136,9 +178,9 @@ const genDeepl = ({ text, from, to, url, key }) => {
return [url, init]; return [url, init];
}; };
const genDeeplX = ({ text, from, to, url, key }) => { const genDeeplX = ({ texts, from, to, url, key }) => {
const data = { const data = {
text, text: texts.join(" "),
target_lang: to, target_lang: to,
source_lang: from, source_lang: from,
}; };
@@ -157,12 +199,12 @@ const genDeeplX = ({ text, from, to, url, key }) => {
return [url, init]; return [url, init];
}; };
const genNiuTrans = ({ text, from, to, url, key, dictNo, memoryNo }) => { const genNiuTrans = ({ texts, from, to, url, key, dictNo, memoryNo }) => {
const data = { const data = {
from, from,
to, to,
apikey: key, apikey: key,
src_text: text, src_text: texts.join(" "),
dictNo, dictNo,
memoryNo, memoryNo,
}; };
@@ -178,7 +220,7 @@ const genNiuTrans = ({ text, from, to, url, key, dictNo, memoryNo }) => {
return [url, init]; return [url, init];
}; };
const genTencent = ({ text, from, to }) => { const genTencent = ({ texts, from, to }) => {
const data = { const data = {
header: { header: {
fn: "auto_translation", fn: "auto_translation",
@@ -188,7 +230,7 @@ const genTencent = ({ text, from, to }) => {
type: "plain", type: "plain",
model_category: "normal", model_category: "normal",
source: { source: {
text_list: [text], text_list: texts,
lang: from, lang: from,
}, },
target: { target: {
@@ -211,11 +253,11 @@ const genTencent = ({ text, from, to }) => {
return [input, init]; return [input, init];
}; };
const genVolcengine = ({ text, from, to }) => { const genVolcengine = ({ texts, from, to }) => {
const data = { const data = {
source_language: from, source_language: from,
target_language: to, target_language: to,
text: text, text: texts.join(" "),
}; };
const input = "https://translate.volcengine.com/crx/translate/v1"; const input = "https://translate.volcengine.com/crx/translate/v1";
@@ -231,7 +273,7 @@ const genVolcengine = ({ text, from, to }) => {
}; };
const genOpenAI = ({ const genOpenAI = ({
text, texts,
from, from,
to, to,
url, url,
@@ -243,21 +285,10 @@ const genOpenAI = ({
maxTokens, maxTokens,
customHeader, customHeader,
customBody, customBody,
docInfo,
}) => { }) => {
// 兼容历史上作为systemPrompt的prompt如果prompt中不包含带翻译文本则添加文本到prompt末尾 systemPrompt = genSystemPrompt({ systemPrompt, from, to });
// if (!prompt.includes(INPUT_PLACE_TEXT)) { userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
// prompt += `\nSource Text: ${INPUT_PLACE_TEXT}`;
// }
systemPrompt = systemPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
userPrompt = userPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
// TODO: 同时支持json对象和hook函数
customHeader = parseJsonObj(customHeader); customHeader = parseJsonObj(customHeader);
customBody = parseJsonObj(customBody); customBody = parseJsonObj(customBody);
@@ -293,7 +324,7 @@ const genOpenAI = ({
}; };
const genGemini = ({ const genGemini = ({
text, texts,
from, from,
to, to,
url, url,
@@ -305,19 +336,13 @@ const genGemini = ({
maxTokens, maxTokens,
customHeader, customHeader,
customBody, customBody,
docInfo,
}) => { }) => {
url = url url = url
.replaceAll(INPUT_PLACE_MODEL, model) .replaceAll(INPUT_PLACE_MODEL, model)
.replaceAll(INPUT_PLACE_KEY, key); .replaceAll(INPUT_PLACE_KEY, key);
systemPrompt = systemPrompt systemPrompt = genSystemPrompt({ systemPrompt, from, to });
.replaceAll(INPUT_PLACE_FROM, from) userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
userPrompt = userPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
customHeader = parseJsonObj(customHeader); customHeader = parseJsonObj(customHeader);
customBody = parseJsonObj(customBody); customBody = parseJsonObj(customBody);
@@ -355,7 +380,7 @@ const genGemini = ({
}; };
const genGemini2 = ({ const genGemini2 = ({
text, texts,
from, from,
to, to,
url, url,
@@ -367,16 +392,10 @@ const genGemini2 = ({
maxTokens, maxTokens,
customHeader, customHeader,
customBody, customBody,
docInfo,
}) => { }) => {
systemPrompt = systemPrompt systemPrompt = genSystemPrompt({ systemPrompt, from, to });
.replaceAll(INPUT_PLACE_FROM, from) userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
userPrompt = userPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
customHeader = parseJsonObj(customHeader); customHeader = parseJsonObj(customHeader);
customBody = parseJsonObj(customBody); customBody = parseJsonObj(customBody);
@@ -411,7 +430,7 @@ const genGemini2 = ({
}; };
const genClaude = ({ const genClaude = ({
text, texts,
from, from,
to, to,
url, url,
@@ -423,16 +442,10 @@ const genClaude = ({
maxTokens, maxTokens,
customHeader, customHeader,
customBody, customBody,
docInfo,
}) => { }) => {
systemPrompt = systemPrompt systemPrompt = genSystemPrompt({ systemPrompt, from, to });
.replaceAll(INPUT_PLACE_FROM, from) userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
userPrompt = userPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
customHeader = parseJsonObj(customHeader); customHeader = parseJsonObj(customHeader);
customBody = parseJsonObj(customBody); customBody = parseJsonObj(customBody);
@@ -466,7 +479,7 @@ const genClaude = ({
}; };
const genOpenRouter = ({ const genOpenRouter = ({
text, texts,
from, from,
to, to,
url, url,
@@ -478,16 +491,10 @@ const genOpenRouter = ({
maxTokens, maxTokens,
customHeader, customHeader,
customBody, customBody,
docInfo,
}) => { }) => {
systemPrompt = systemPrompt systemPrompt = genSystemPrompt({ systemPrompt, from, to });
.replaceAll(INPUT_PLACE_FROM, from) userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
userPrompt = userPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
customHeader = parseJsonObj(customHeader); customHeader = parseJsonObj(customHeader);
customBody = parseJsonObj(customBody); customBody = parseJsonObj(customBody);
@@ -522,7 +529,7 @@ const genOpenRouter = ({
}; };
const genOllama = ({ const genOllama = ({
text, texts,
from, from,
to, to,
think, think,
@@ -533,16 +540,10 @@ const genOllama = ({
model, model,
customHeader, customHeader,
customBody, customBody,
docInfo,
}) => { }) => {
systemPrompt = systemPrompt systemPrompt = genSystemPrompt({ systemPrompt, from, to });
.replaceAll(INPUT_PLACE_FROM, from) userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
userPrompt = userPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text);
customHeader = parseJsonObj(customHeader); customHeader = parseJsonObj(customHeader);
customBody = parseJsonObj(customBody); customBody = parseJsonObj(customBody);
@@ -570,9 +571,9 @@ const genOllama = ({
return [url, init]; return [url, init];
}; };
const genCloudflareAI = ({ text, from, to, url, key }) => { const genCloudflareAI = ({ texts, from, to, url, key }) => {
const data = { const data = {
text, text: texts.join(" "),
source_lang: from, source_lang: from,
target_lang: to, target_lang: to,
}; };
@@ -589,36 +590,21 @@ const genCloudflareAI = ({ text, from, to, url, key }) => {
return [url, init]; return [url, init];
}; };
const genCustom = ({ text, from, to, url, key, reqHook }) => { const genCustom = ({ texts, from, to, url, key, reqHook, docInfo }) => {
url = url
.replaceAll(INPUT_PLACE_URL, url)
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, text)
.replaceAll(INPUT_PLACE_KEY, key);
let init = {};
if (reqHook?.trim()) { if (reqHook?.trim()) {
interpreter.run(`exports.reqHook = ${reqHook}`); interpreter.run(`exports.reqHook = ${reqHook}`);
[url, init] = interpreter.exports.reqHook(text, from, to, url, key); return interpreter.exports.reqHook({ texts, from, to, url, key, docInfo });
return [url, init];
} }
const data = { const data = { texts, from, to };
text, const init = {
from,
to,
};
init = {
headers: { headers: {
"Content-type": "application/json", "Content-type": "application/json",
Authorization: `Bearer ${key}`,
}, },
method: "POST", method: "POST",
body: JSON.stringify(data), body: JSON.stringify(data),
}; };
if (key) {
init.headers.Authorization = `Bearer ${key}`;
}
return [url, init]; return [url, init];
}; };
@@ -710,123 +696,138 @@ export const genTransReq = (translator, args) => {
* 解析翻译接口返回数据 * 解析翻译接口返回数据
* @param {*} translator * @param {*} translator
* @param {*} res * @param {*} res
* @param {*} apiSetting
* @param {*} param3 * @param {*} param3
* @returns * @returns
*/ */
export const parseTransRes = ( export const parseTransRes = (
translator, translator,
res, res,
apiSetting, { texts, from, to, resHook }
{ text, from, to }
) => { ) => {
let trText = ""; // 返回的译文
let isSame = false; // 译文与原文语言是否相同
switch (translator) { switch (translator) {
case OPT_TRANS_GOOGLE: case OPT_TRANS_GOOGLE:
trText = res.sentences.map((item) => item.trans).join(" "); return [[res?.sentences?.map((item) => item.trans).join(" "), res?.src]];
isSame = to === res.src;
break;
case OPT_TRANS_GOOGLE_2: case OPT_TRANS_GOOGLE_2:
trText = res?.[0]?.[0] || ""; return res?.[0]?.map((_, i) => [res?.[0]?.[i], res?.[1]?.[i]]);
isSame = to === res.src;
break;
case OPT_TRANS_MICROSOFT: case OPT_TRANS_MICROSOFT:
trText = res return res?.map((item) => [
.map((item) => item.translations.map((item) => item.text).join(" ")) item.translations.map((item) => item.text).join(" "),
.join(" "); item.detectedLanguage.language,
isSame = text === trText; ]);
break;
case OPT_TRANS_DEEPL: case OPT_TRANS_DEEPL:
trText = res.translations.map((item) => item.text).join(" "); return res?.translations?.map((item) => [
isSame = to === res.translations[0].detected_source_language; item.text,
break; item.detected_source_language,
]);
case OPT_TRANS_DEEPLFREE: case OPT_TRANS_DEEPLFREE:
trText = res.result?.texts.map((item) => item.text).join(" "); return [
isSame = to === res.result?.lang; [
break; res?.result?.texts?.map((item) => item.text).join(" "),
res?.result?.lang,
],
];
case OPT_TRANS_DEEPLX: case OPT_TRANS_DEEPLX:
trText = res.data; return [[res?.data, res?.source_lang]];
isSame = to === res.source_lang;
break;
case OPT_TRANS_NIUTRANS: case OPT_TRANS_NIUTRANS:
const json = JSON.parse(res); const json = JSON.parse(res);
if (json.error_msg) { if (json.error_msg) {
throw new Error(json.error_msg); throw new Error(json.error_msg);
} }
trText = json.tgt_text; return [[json.tgt_text, json.from]];
isSame = to === json.from;
break;
case OPT_TRANS_BAIDU: case OPT_TRANS_BAIDU:
// trText = res.trans_result?.data.map((item) => item.dst).join(" ");
// isSame = res.trans_result?.to === res.trans_result?.from;
if (res.type === 1) { if (res.type === 1) {
trText = Object.keys(JSON.parse(res.result).content[0].mean[0].cont)[0]; return [
isSame = to === res.from; [
Object.keys(JSON.parse(res.result).content[0].mean[0].cont)[0],
res.from,
],
];
} else if (res.type === 2) { } else if (res.type === 2) {
trText = res.data.map((item) => item.dst).join(" "); return [[res.data.map((item) => item.dst).join(" "), res.from]];
isSame = to === res.from;
} }
break; break;
case OPT_TRANS_TENCENT: case OPT_TRANS_TENCENT:
trText = res?.auto_translation?.[0]; return res?.auto_translation?.map((text) => [text, res?.src_lang]);
isSame = text === trText;
break;
case OPT_TRANS_VOLCENGINE: case OPT_TRANS_VOLCENGINE:
trText = res?.translation || ""; return new Map([[0, [res?.translation, res?.detected_language]]]);
isSame = to === res?.detected_language;
break;
case OPT_TRANS_OPENAI: case OPT_TRANS_OPENAI:
case OPT_TRANS_OPENAI_2: case OPT_TRANS_OPENAI_2:
case OPT_TRANS_OPENAI_3: case OPT_TRANS_OPENAI_3:
case OPT_TRANS_GEMINI_2: case OPT_TRANS_GEMINI_2:
case OPT_TRANS_OPENROUTER: case OPT_TRANS_OPENROUTER:
trText = res?.choices?.map((item) => item.message.content).join(" "); return parseTranslations(res?.choices?.[0]?.message?.content ?? "");
isSame = text === trText;
break;
case OPT_TRANS_GEMINI: case OPT_TRANS_GEMINI:
trText = res?.candidates return parseTranslations(
?.map((item) => item.content?.parts.map((item) => item.text).join(" ")) res?.candidates?.[0]?.content?.parts?.[0]?.text ?? ""
.join(" "); );
isSame = text === trText;
break;
case OPT_TRANS_CLAUDE: case OPT_TRANS_CLAUDE:
trText = res?.content?.map((item) => item.text).join(" "); return parseTranslations(res?.content?.[0]?.text ?? "");
isSame = text === trText;
break;
case OPT_TRANS_CLOUDFLAREAI: case OPT_TRANS_CLOUDFLAREAI:
trText = res?.result?.translated_text; return [[res?.result?.translated_text]];
isSame = text === trText;
break;
case OPT_TRANS_OLLAMA: case OPT_TRANS_OLLAMA:
case OPT_TRANS_OLLAMA_2: case OPT_TRANS_OLLAMA_2:
case OPT_TRANS_OLLAMA_3: case OPT_TRANS_OLLAMA_3:
const { thinkIgnore = "" } = apiSetting; // const deepModels = thinkIgnore.split(",").filter((model) => model.trim());
const deepModels = thinkIgnore.split(",").filter((model) => model.trim()); // if (deepModels.some((model) => res?.model?.startsWith(model))) {
if (deepModels.some((model) => res?.model?.startsWith(model))) { // trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, "");
trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, ""); // } else {
} else { // trText = res?.response;
trText = res?.response; // }
} return parseTranslations(res?.response ?? "");
isSame = text === trText;
break;
case OPT_TRANS_CUSTOMIZE: case OPT_TRANS_CUSTOMIZE:
case OPT_TRANS_CUSTOMIZE_2: case OPT_TRANS_CUSTOMIZE_2:
case OPT_TRANS_CUSTOMIZE_3: case OPT_TRANS_CUSTOMIZE_3:
case OPT_TRANS_CUSTOMIZE_4: case OPT_TRANS_CUSTOMIZE_4:
case OPT_TRANS_CUSTOMIZE_5: case OPT_TRANS_CUSTOMIZE_5:
const { resHook } = apiSetting;
if (resHook?.trim()) { if (resHook?.trim()) {
interpreter.run(`exports.resHook = ${resHook}`); interpreter.run(`exports.resHook = ${resHook}`);
[trText, isSame] = interpreter.exports.resHook(res, text, from, to); return interpreter.exports.resHook({ res, texts, from, to });
} else { } else {
trText = res.text; return res?.map((item) => [item.text, item.src]);
isSame = to === res.from;
} }
break;
default: default:
} }
return [trText, isSame]; return [];
};
/**
* 发送翻译请求并解析
* @param {*} param0
* @returns
*/
export const fetchTranslate = async ({
translator,
texts,
from,
to,
docInfo,
apiSetting,
usePool,
}) => {
const [input, init] = await genTransReq(translator, {
texts,
from,
to,
docInfo,
...apiSetting,
});
const res = await fetchData(input, init, {
useCache: false,
usePool,
fetchInterval: apiSetting.fetchInterval,
fetchLimit: apiSetting.fetchLimit,
httpTimeout: apiSetting.httpTimeout,
});
if (!res) {
throw new Error("tranlate got empty response");
}
return parseTransRes(translator, res, {
texts,
from,
to,
...apiSetting,
});
}; };

View File

@@ -1,6 +1,9 @@
export const DEFAULT_HTTP_TIMEOUT = 10000; // 调用超时时间 export const DEFAULT_HTTP_TIMEOUT = 10000; // 调用超时时间
export const DEFAULT_FETCH_LIMIT = 10; // 默认最大任务数量 export const DEFAULT_FETCH_LIMIT = 10; // 默认最大任务数量
export const DEFAULT_FETCH_INTERVAL = 100; // 默认任务间隔时间 export const DEFAULT_FETCH_INTERVAL = 100; // 默认任务间隔时间
export const DEFAULT_BATCH_INTERVAL = 1000; // 批处理请求间隔时间
export const DEFAULT_BATCH_SIZE = 10; // 每次最多发送段落数量
export const DEFAULT_BATCH_LENGTH = 10000; // 每次发送最大文字数量
export const INPUT_PLACE_URL = "{{url}}"; // 占位符 export const INPUT_PLACE_URL = "{{url}}"; // 占位符
export const INPUT_PLACE_FROM = "{{from}}"; // 占位符 export const INPUT_PLACE_FROM = "{{from}}"; // 占位符
@@ -66,6 +69,28 @@ export const OPT_TRANS_ALL = [
OPT_TRANS_CUSTOMIZE_5, OPT_TRANS_CUSTOMIZE_5,
]; ];
export const OPT_TRANS_BATCH = new Set([
OPT_TRANS_GOOGLE_2,
OPT_TRANS_MICROSOFT,
OPT_TRANS_TENCENT,
OPT_TRANS_DEEPL,
OPT_TRANS_OPENAI,
OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI,
OPT_TRANS_GEMINI_2,
OPT_TRANS_CLAUDE,
OPT_TRANS_OLLAMA,
OPT_TRANS_OLLAMA_2,
OPT_TRANS_OLLAMA_3,
OPT_TRANS_OPENROUTER,
OPT_TRANS_CUSTOMIZE,
OPT_TRANS_CUSTOMIZE_2,
OPT_TRANS_CUSTOMIZE_3,
OPT_TRANS_CUSTOMIZE_4,
OPT_TRANS_CUSTOMIZE_5,
]);
export const OPT_LANGDETECTOR_ALL = [ export const OPT_LANGDETECTOR_ALL = [
OPT_TRANS_GOOGLE, OPT_TRANS_GOOGLE,
OPT_TRANS_MICROSOFT, OPT_TRANS_MICROSOFT,
@@ -247,23 +272,18 @@ export const OPT_LANGS_SPECIAL = {
]), ]),
[OPT_TRANS_CUSTOMIZE]: new Map([ [OPT_TRANS_CUSTOMIZE]: new Map([
...OPT_LANGS_FROM.map(([key]) => [key, key]), ...OPT_LANGS_FROM.map(([key]) => [key, key]),
["auto", ""],
]), ]),
[OPT_TRANS_CUSTOMIZE_2]: new Map([ [OPT_TRANS_CUSTOMIZE_2]: new Map([
...OPT_LANGS_FROM.map(([key]) => [key, key]), ...OPT_LANGS_FROM.map(([key]) => [key, key]),
["auto", ""],
]), ]),
[OPT_TRANS_CUSTOMIZE_3]: new Map([ [OPT_TRANS_CUSTOMIZE_3]: new Map([
...OPT_LANGS_FROM.map(([key]) => [key, key]), ...OPT_LANGS_FROM.map(([key]) => [key, key]),
["auto", ""],
]), ]),
[OPT_TRANS_CUSTOMIZE_4]: new Map([ [OPT_TRANS_CUSTOMIZE_4]: new Map([
...OPT_LANGS_FROM.map(([key]) => [key, key]), ...OPT_LANGS_FROM.map(([key]) => [key, key]),
["auto", ""],
]), ]),
[OPT_TRANS_CUSTOMIZE_5]: new Map([ [OPT_TRANS_CUSTOMIZE_5]: new Map([
...OPT_LANGS_FROM.map(([key]) => [key, key]), ...OPT_LANGS_FROM.map(([key]) => [key, key]),
["auto", ""],
]), ]),
}; };
export const OPT_LANGS_LIST = OPT_LANGS_TO.map(([lang]) => lang); export const OPT_LANGS_LIST = OPT_LANGS_TO.map(([lang]) => lang);
@@ -294,8 +314,30 @@ const defaultApi = {
url: "", url: "",
key: "", key: "",
model: "", // 模型名称 model: "", // 模型名称
systemPrompt: `You are a professional, authentic machine translation engine.`, systemPrompt: `You are a translation API.
userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
Output:
- Return one raw JSON object only.
- Start with "{" and end with "}".
- No fences or extra text.
Input JSON:
{"targetLanguage":"<lang>","title":"<title>","description":"<desc>","segments":[{"id":1,"text":"..."}]}
Output JSON:
{"translations":[{"id":1,"text":"...","sourceLanguage":"<detected-language>"}]}
Rules:
1. Use title/description as context only, do not output them.
2. Keep ids/order/count.
3. Translate inner text only, not HTML tags.
4. Do not translate <code>, <pre>, backticks, or terms like React, Docker, JavaScript, API.
5. Preserve whitespace & entities.
6. Automatically detect the source language of each segment and add it in the "sourceLanguage" field.
7. Empty/unchanged input → unchanged.
Fail-safe: {"translations":[]}`,
userPrompt: `${INPUT_PLACE_TEXT}`,
customHeader: "", customHeader: "",
customBody: "", customBody: "",
reqHook: "", // request 钩子函数 reqHook: "", // request 钩子函数
@@ -303,28 +345,31 @@ const defaultApi = {
fetchLimit: DEFAULT_FETCH_LIMIT, // 最大请求数量 fetchLimit: DEFAULT_FETCH_LIMIT, // 最大请求数量
fetchInterval: DEFAULT_FETCH_INTERVAL, // 请求间隔时间 fetchInterval: DEFAULT_FETCH_INTERVAL, // 请求间隔时间
httpTimeout: DEFAULT_HTTP_TIMEOUT, // 请求超时时间 httpTimeout: DEFAULT_HTTP_TIMEOUT, // 请求超时时间
batchInterval: DEFAULT_BATCH_INTERVAL, // 批处理请求间隔时间
batchSize: DEFAULT_BATCH_SIZE, // 每次最多发送段落数量
batchLength: DEFAULT_BATCH_LENGTH, // 每次发送最大文字数量
isBatchFetch: false, // 是否启用聚合发送请求
isRichText: false, // 是否启用富文本翻译
isContext: false, // 是否启用智能上下文
temperature: 0, temperature: 0,
maxTokens: 2048, maxTokens: 20480,
think: false, think: false,
thinkIgnore: "qwen3,deepseek-r1", thinkIgnore: "qwen3,deepseek-r1",
isDisabled: false, // 是否不显示 isDisabled: false, // 是否不显示
}; };
const defaultCustomApi = { const defaultCustomApi = {
...defaultApi, ...defaultApi,
url: "https://translate.googleapis.com/translate_a/single?client=gtx&dj=1&dt=t&ie=UTF-8&q={{text}}&sl=en&tl=zh-CN",
reqHook: `// Request Hook reqHook: `// Request Hook
(text, from, to, url, key) => [ (text, from, to, url, key) => [url, {
url,
{
headers: { headers: {
"Content-Type": "application/json", "Content-type": "application/json",
Authorization: \`Bearer \${key}\`,
}, },
method: "GET", method: "GET",
body: JSON.stringify({ text, from, to }), body: null,
}, }]`,
];`,
resHook: `// Response Hook resHook: `// Response Hook
(res, text, from, to) => [res.text, to === res.src];`, (res, text, from, to) => [res.sentences.map((item) => item.trans).join(" "), to === res.src]`,
}; };
const defaultOpenaiApi = { const defaultOpenaiApi = {
...defaultApi, ...defaultApi,
@@ -350,11 +395,13 @@ export const DEFAULT_TRANS_APIS = {
apiName: OPT_TRANS_GOOGLE_2, apiName: OPT_TRANS_GOOGLE_2,
url: "https://translate-pa.googleapis.com/v1/translateHtml", url: "https://translate-pa.googleapis.com/v1/translateHtml",
key: "AIzaSyATBXajvzQLTDHEQbcpq0Ihe0vWDHmO520", key: "AIzaSyATBXajvzQLTDHEQbcpq0Ihe0vWDHmO520",
isBatchFetch: true,
}, },
[OPT_TRANS_MICROSOFT]: { [OPT_TRANS_MICROSOFT]: {
...defaultApi, ...defaultApi,
apiSlug: OPT_TRANS_MICROSOFT, apiSlug: OPT_TRANS_MICROSOFT,
apiName: OPT_TRANS_MICROSOFT, apiName: OPT_TRANS_MICROSOFT,
isBatchFetch: true,
}, },
[OPT_TRANS_BAIDU]: { [OPT_TRANS_BAIDU]: {
...defaultApi, ...defaultApi,
@@ -365,6 +412,7 @@ export const DEFAULT_TRANS_APIS = {
...defaultApi, ...defaultApi,
apiSlug: OPT_TRANS_TENCENT, apiSlug: OPT_TRANS_TENCENT,
apiName: OPT_TRANS_TENCENT, apiName: OPT_TRANS_TENCENT,
isBatchFetch: true,
}, },
[OPT_TRANS_VOLCENGINE]: { [OPT_TRANS_VOLCENGINE]: {
...defaultApi, ...defaultApi,
@@ -376,7 +424,7 @@ export const DEFAULT_TRANS_APIS = {
apiSlug: OPT_TRANS_DEEPL, apiSlug: OPT_TRANS_DEEPL,
apiName: OPT_TRANS_DEEPL, apiName: OPT_TRANS_DEEPL,
url: "https://api-free.deepl.com/v2/translate", url: "https://api-free.deepl.com/v2/translate",
fetchLimit: 1, isBatchFetch: true,
}, },
[OPT_TRANS_DEEPLFREE]: { [OPT_TRANS_DEEPLFREE]: {
...defaultApi, ...defaultApi,

View File

@@ -1228,4 +1228,24 @@ export const I18N = {
en: `If translate selected`, en: `If translate selected`,
zh_TW: `是否啟用劃詞翻譯`, zh_TW: `是否啟用劃詞翻譯`,
}, },
is_batch_fetch: {
zh: `是否聚合发送翻译请求`,
en: `Whether to aggregate and send translation requests`,
zh_TW: `是否聚合發送翻譯請求`,
},
batch_interval: {
zh: `聚合请求等待时间(100-5000)`,
en: `Aggregation request waiting time (100-5000)`,
zh_TW: `聚合請求等待時間(100-5000)`,
},
batch_size: {
zh: `聚合请求最大段落数(1-100)`,
en: `Maximum number of paragraphs in an aggregation request (1-100)`,
zh_TW: `聚合請求最大段落數(1-100)`,
},
batch_length: {
zh: `聚合请求最大文本长度(500-50000)`,
en: `Maximum text length for aggregation requests (500-50000)`,
zh_TW: `聚合請求最大文字長度(500-50000)`,
},
}; };

View File

@@ -12,7 +12,7 @@ import { kissLog } from "../libs/log";
* @param {*} setting * @param {*} setting
* @returns * @returns
*/ */
export function useTranslate(q, rule, setting) { export function useTranslate(q, rule, setting, docInfo) {
const [text, setText] = useState(""); const [text, setText] = useState("");
const [loading, setLoading] = useState(true); const [loading, setLoading] = useState(true);
const [sameLang, setSamelang] = useState(false); const [sameLang, setSamelang] = useState(false);
@@ -48,6 +48,7 @@ export function useTranslate(q, rule, setting) {
...DEFAULT_TRANS_APIS[translator], ...DEFAULT_TRANS_APIS[translator],
...(setting.transApis[translator] || {}), ...(setting.transApis[translator] || {}),
}, },
docInfo,
}); });
setText(trText); setText(trText);
setSamelang(isSame); setSamelang(isSame);
@@ -58,7 +59,16 @@ export function useTranslate(q, rule, setting) {
setLoading(false); setLoading(false);
} }
})(); })();
}, [q, translator, fromLang, toLang, detectRemote, skipLangs, setting]); }, [
q,
translator,
fromLang,
toLang,
detectRemote,
skipLangs,
setting,
docInfo,
]);
return { text, sameLang, loading }; return { text, sameLang, loading };
} }

153
src/libs/batchQueue.js Normal file
View File

@@ -0,0 +1,153 @@
import { fetchTranslate } from "../apis/trans";
/**
 * Batch-processing queue for translation requests.
 *
 * Buffers individual { text } payloads and flushes them to `fetchTranslate`
 * as one aggregated request, bounded by three knobs:
 *   - batchSize:     max number of payloads per request
 *   - batchLength:   max combined text length per request
 *   - batchInterval: ms to wait before flushing a partially-filled batch
 *
 * @param {*} translator - fixed request context: { translator, from, to, docInfo, apiSetting, usePool }
 * @param {*} param1 - batching options (defaults: 1000 ms / 10 items / 10000 chars)
 * @returns {{ addTask: (data: {text: string}) => Promise<*>, destroy: () => void }}
 */
const batchQueue = (
  { translator, from, to, docInfo, apiSetting, usePool },
  { batchInterval = 1000, batchSize = 10, batchLength = 10000 } = {}
) => {
  // Pending tasks: { payload, resolve, reject }. resolve/reject settle the
  // Promise handed out by addTask.
  const queue = [];
  // Guards against overlapping flushes while an await is in flight.
  let isProcessing = false;
  // Handle of the delayed flush scheduled by scheduleProcessing(), if any.
  let timer = null;

  // Send one aggregated request covering every payload in this batch.
  // NOTE(review): assumes fetchTranslate returns an array of results aligned
  // index-for-index with `texts` — confirm against the trans API.
  const sendBatchRequest = async (payloads) => {
    const texts = payloads.map((item) => item.text);
    return fetchTranslate({
      translator,
      texts,
      from,
      to,
      docInfo,
      apiSetting,
      usePool,
    });
  };

  // Flush as many queued tasks as fit within batchSize/batchLength,
  // settle their promises, then schedule another pass if work remains.
  const processQueue = async () => {
    // Cancel any pending delayed flush; we are flushing now.
    if (timer) {
      clearTimeout(timer);
      timer = null;
    }
    if (queue.length === 0 || isProcessing) {
      return;
    }
    isProcessing = true;

    // Select a prefix of the queue that respects both limits. A single
    // oversized task is still allowed through (endIndex > 0 guard) so it
    // can never block the queue forever.
    let tasksToProcess = [];
    let currentBatchLength = 0;
    let endIndex = 0;
    for (const task of queue) {
      const textLength = task.payload.text?.length || 0;
      if (
        endIndex >= batchSize ||
        (currentBatchLength + textLength > batchLength && endIndex > 0)
      ) {
        break;
      }
      currentBatchLength += textLength;
      endIndex++;
    }
    if (endIndex > 0) {
      tasksToProcess = queue.splice(0, endIndex);
    }
    if (tasksToProcess.length === 0) {
      isProcessing = false;
      return;
    }

    try {
      const payloads = tasksToProcess.map((item) => item.payload);
      const responses = await sendBatchRequest(payloads);
      // Settle each task with its positional result; a missing slot is
      // treated as a per-item failure rather than failing the whole batch.
      tasksToProcess.forEach((taskItem, index) => {
        const response = responses[index];
        if (response) {
          taskItem.resolve(response);
        } else {
          taskItem.reject(new Error(`No response for item at index ${index}`));
        }
      });
    } catch (error) {
      // Request-level failure: propagate to every task in this batch.
      tasksToProcess.forEach((taskItem) => taskItem.reject(error));
    } finally {
      isProcessing = false;
      // Keep draining: a full batch flushes immediately (next tick),
      // a partial one waits batchInterval for more tasks to accumulate.
      if (queue.length > 0) {
        if (queue.length >= batchSize) {
          setTimeout(processQueue, 0);
        } else {
          scheduleProcessing();
        }
      }
    }
  };

  // Arm a delayed flush, unless one is already armed or running.
  const scheduleProcessing = () => {
    if (!isProcessing && !timer && queue.length > 0) {
      timer = setTimeout(processQueue, batchInterval);
    }
  };

  // Enqueue one payload; resolves with that payload's translation result.
  // Flushes immediately once batchSize is reached, otherwise waits.
  const addTask = (data) => {
    return new Promise((resolve, reject) => {
      const payload = data;
      queue.push({ payload, resolve, reject });
      if (queue.length >= batchSize) {
        processQueue();
      } else {
        scheduleProcessing();
      }
    });
  };

  // Tear down: cancel the pending flush and reject all queued tasks so no
  // caller is left awaiting forever.
  const destroy = () => {
    if (timer) {
      clearTimeout(timer);
      timer = null;
    }
    queue.forEach((task) =>
      task.reject(new Error("Queue instance was destroyed."))
    );
    queue.length = 0;
  };

  return { addTask, destroy };
};
// Registry of live queue instances, keyed by translator/language pair.
const queueMap = new Map();

/**
 * Get (or lazily create) the shared batch queue for a translator + from/to
 * language pair.
 *
 * NOTE(review): the cache key only covers translator/from/to, so the
 * `docInfo`, `apiSetting`, `usePool` and `opts` from the FIRST call for a
 * given key stick to the shared instance; later calls with different
 * options silently reuse it — confirm this is intended.
 *
 * @param {*} args - batchQueue context ({ translator, from, to, docInfo, apiSetting, usePool })
 * @param {*} opts - batching options, only honored when the instance is created
 * @returns {{ addTask: Function, destroy: Function }}
 */
export const getBatchQueue = (args, opts) => {
  const { translator, from, to } = args;
  const key = `${translator}_${from}_${to}`;
  if (queueMap.has(key)) {
    return queueMap.get(key);
  }
  const queue = batchQueue(args, opts);
  queueMap.set(key, queue);
  return queue;
};
/**
 * Destroy every live batch queue (rejecting their pending tasks) and reset
 * the registry so `getBatchQueue` builds fresh instances afterwards.
 */
export const clearAllBatchQueue = () => {
  // Bug fix: the original iterated `queueMap.entries()`, which yields
  // [key, queue] pairs, so `pair.destroy` was undefined and this threw a
  // TypeError on the first queue. Iterate the values instead.
  for (const queue of queueMap.values()) {
    queue.destroy();
  }
  // Drop the destroyed instances; otherwise getBatchQueue would keep
  // returning queues whose tasks are permanently rejected.
  queueMap.clear();
};

View File

@@ -24,6 +24,7 @@ import { isExt } from "./client";
import { injectInlineJs, injectInternalCss } from "./injector"; import { injectInlineJs, injectInternalCss } from "./injector";
import { kissLog } from "./log"; import { kissLog } from "./log";
import interpreter from "./interpreter"; import interpreter from "./interpreter";
import { clearAllBatchQueue } from "./batchQueue";
/** /**
* 翻译类 * 翻译类
@@ -54,6 +55,7 @@ export class Translator {
_keepSelector = ""; _keepSelector = "";
_terms = []; _terms = [];
_docTitle = ""; _docTitle = "";
_docDescription = "";
// 显示 // 显示
_interseObserver = new IntersectionObserver( _interseObserver = new IntersectionObserver(
@@ -95,6 +97,11 @@ export class Translator {
}); });
}); });
_getDocDescription = () => {
const meta = document.querySelector('meta[name="description"]');
return meta ? meta.getAttribute("content") : "";
};
// 插入 shadowroot // 插入 shadowroot
_overrideAttachShadow = () => { _overrideAttachShadow = () => {
const _this = this; const _this = this;
@@ -110,6 +117,8 @@ export class Translator {
this._setting = setting; this._setting = setting;
this._rule = rule; this._rule = rule;
this._docTitle = document.title;
this._docDescription = this._getDocDescription();
this._keepSelector = rule.keepSelector || ""; this._keepSelector = rule.keepSelector || "";
this._terms = (rule.terms || "") this._terms = (rule.terms || "")
@@ -126,6 +135,13 @@ export class Translator {
return this._setting; return this._setting;
} }
  /**
   * Page metadata snapshot: the document title and meta description captured
   * into _docTitle/_docDescription. Presumably forwarded with translation
   * requests as page context — confirm at the call sites.
   */
  get docInfo() {
    return {
      title: this._docTitle,
      description: this._docDescription,
    };
  }
get eventName() { get eventName() {
return this._eventName; return this._eventName;
} }
@@ -426,6 +442,7 @@ export class Translator {
// 清空任务池 // 清空任务池
clearFetchPool(); clearFetchPool();
clearAllBatchQueue();
}; };
_removeInjector = () => { _removeInjector = () => {

View File

@@ -289,3 +289,16 @@ export const parseJsonObj = (str) => {
return {}; return {};
}; };
/**
 * Extract a JSON object from raw (e.g. LLM) output.
 *
 * Strips an optional Markdown code fence (``` or ```json, case-insensitive)
 * and returns the widest "{...}" span — first "{" through the LAST "}" — so
 * nested objects stay intact. Falls back to "{}" when the input is
 * nullish/empty or contains no braces, so the result is always parseable as
 * an object when the payload is well-formed.
 *
 * @param {string|null|undefined} raw - raw model output
 * @returns {string} the extracted JSON object text, or "{}"
 */
export const extractJson = (raw) => {
  if (!raw) return "{}";
  // Never reassigned — const instead of the original `let`.
  const s = raw.replace(/^\s*```(?:json)?\s*/i, "").replace(/\s*```\s*$/i, "");
  // Greedy match keeps everything up to the last closing brace.
  const match = s.match(/\{[\s\S]*\}/);
  return match ? match[0] : "{}";
};

View File

@@ -98,7 +98,12 @@ const StyledSpan = styled("span")`
export default function Content({ q, keeps, translator, $el }) { export default function Content({ q, keeps, translator, $el }) {
const [rule, setRule] = useState(translator.rule); const [rule, setRule] = useState(translator.rule);
const { text, sameLang, loading } = useTranslate(q, rule, translator.setting); const { text, sameLang, loading } = useTranslate(
q,
rule,
translator.setting,
translator.docInfo
);
const { const {
transOpen, transOpen,
textStyle, textStyle,

View File

@@ -34,6 +34,10 @@ import {
DEFAULT_FETCH_LIMIT, DEFAULT_FETCH_LIMIT,
DEFAULT_FETCH_INTERVAL, DEFAULT_FETCH_INTERVAL,
DEFAULT_HTTP_TIMEOUT, DEFAULT_HTTP_TIMEOUT,
OPT_TRANS_BATCH,
DEFAULT_BATCH_INTERVAL,
DEFAULT_BATCH_SIZE,
DEFAULT_BATCH_LENGTH,
} from "../../config"; } from "../../config";
import { useState } from "react"; import { useState } from "react";
import { useI18n } from "../../hooks/I18n"; import { useI18n } from "../../hooks/I18n";
@@ -140,6 +144,10 @@ function ApiFields({ translator, api, updateApi, resetApi }) {
maxTokens = 256, maxTokens = 256,
apiName = "", apiName = "",
isDisabled = false, isDisabled = false,
isBatchFetch = false,
batchInterval = DEFAULT_BATCH_INTERVAL,
batchSize = DEFAULT_BATCH_SIZE,
batchLength = DEFAULT_BATCH_LENGTH,
} = api; } = api;
const handleChange = (e) => { const handleChange = (e) => {
@@ -160,6 +168,15 @@ function ApiFields({ translator, api, updateApi, resetApi }) {
case "maxTokens": case "maxTokens":
value = limitNumber(value, 0, 2 ** 15); value = limitNumber(value, 0, 2 ** 15);
break; break;
case "batchInterval":
value = limitNumber(value, 100, 5000);
break;
case "batchSize":
value = limitNumber(value, 1, 100);
break;
case "batchLength":
value = limitNumber(value, 500, 50000);
break;
default: default:
} }
updateApi({ updateApi({
@@ -394,6 +411,50 @@ function ApiFields({ translator, api, updateApi, resetApi }) {
</> </>
)} )}
{OPT_TRANS_BATCH.has(translator) && (
<>
<TextField
select
size="small"
name="isBatchFetch"
value={isBatchFetch}
label={i18n("is_batch_fetch")}
onChange={handleChange}
>
<MenuItem value={false}>{i18n("disable")}</MenuItem>
<MenuItem value={true}>{i18n("enable")}</MenuItem>
</TextField>
{isBatchFetch && (
<>
<TextField
size="small"
label={i18n("batch_interval")}
type="number"
name="batchInterval"
value={batchInterval}
onChange={handleChange}
/>
<TextField
size="small"
label={i18n("batch_size")}
type="number"
name="batchSize"
value={batchSize}
onChange={handleChange}
/>
<TextField
size="small"
label={i18n("batch_length")}
type="number"
name="batchLength"
value={batchLength}
onChange={handleChange}
/>
</>
)}
</>
)}
<TextField <TextField
size="small" size="small"
label={i18n("fetch_limit")} label={i18n("fetch_limit")}

View File

@@ -71,6 +71,7 @@ export default function FavWords() {
const tranList = []; const tranList = [];
for (const text of downloadList) { for (const text of downloadList) {
try { try {
// todo
const dictRes = await apiTranslate({ const dictRes = await apiTranslate({
text, text,
translator: OPT_TRANS_BAIDU, translator: OPT_TRANS_BAIDU,

View File

@@ -26,6 +26,7 @@ export default function DictCont({ text }) {
return; return;
} }
// todo
const dictRes = await apiTranslate({ const dictRes = await apiTranslate({
text, text,
translator: OPT_TRANS_BAIDU, translator: OPT_TRANS_BAIDU,