feat: Support batch fetch, and update AI prompt
This commit is contained in:
@@ -1,11 +1,11 @@
|
||||
import queryString from "query-string";
|
||||
import { DEFAULT_USER_AGENT } from "../config";
|
||||
|
||||
export const genBaidu = async ({ text, from, to }) => {
|
||||
export const genBaidu = async ({ texts, from, to }) => {
|
||||
const data = {
|
||||
from,
|
||||
to,
|
||||
query: text,
|
||||
query: texts.join(" "),
|
||||
source: "txt",
|
||||
};
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
let id = 1e4 * Math.round(1e4 * Math.random());
|
||||
|
||||
export const genDeeplFree = ({ text, from, to }) => {
|
||||
export const genDeeplFree = ({ texts, from, to }) => {
|
||||
const text = texts.join(" ");
|
||||
const iCount = (text.match(/[i]/g) || []).length + 1;
|
||||
let timestamp = Date.now();
|
||||
timestamp = timestamp + (iCount - (timestamp % iCount));
|
||||
|
||||
@@ -7,12 +7,14 @@ import {
|
||||
OPT_LANGS_TENCENT,
|
||||
OPT_LANGS_SPECIAL,
|
||||
OPT_LANGS_MICROSOFT,
|
||||
OPT_TRANS_BATCH,
|
||||
} from "../config";
|
||||
import { sha256 } from "../libs/utils";
|
||||
import { msAuth } from "../libs/auth";
|
||||
import { kissLog } from "../libs/log";
|
||||
import { genTransReq, parseTransRes } from "./trans";
|
||||
import { fetchTranslate } from "./trans";
|
||||
import { getHttpCachePolyfill, putHttpCachePolyfill } from "../libs/cache";
|
||||
import { getBatchQueue } from "../libs/batchQueue";
|
||||
|
||||
/**
|
||||
* 同步数据
|
||||
@@ -203,13 +205,10 @@ export const apiTranslate = async ({
|
||||
fromLang,
|
||||
toLang,
|
||||
apiSetting = {},
|
||||
docInfo = {},
|
||||
useCache = true,
|
||||
usePool = true,
|
||||
}) => {
|
||||
let cacheInput; // 缓存URL
|
||||
let resCache; // 缓存对象
|
||||
let res; // 翻译接口返回的JSON数据
|
||||
|
||||
if (!text) {
|
||||
return ["", false];
|
||||
}
|
||||
@@ -223,57 +222,59 @@ export const apiTranslate = async ({
|
||||
return ["", false];
|
||||
}
|
||||
|
||||
// TODO: 优化缓存失效因素
|
||||
const [v1, v2] = process.env.REACT_APP_VERSION.split(".");
|
||||
const cacheOpts = {
|
||||
translator,
|
||||
text,
|
||||
fromLang,
|
||||
toLang,
|
||||
model: apiSetting.model, // model改变,缓存失效
|
||||
version: [v1, v2].join("."),
|
||||
};
|
||||
const cacheInput = `${URL_CACHE_TRAN}?${queryString.stringify(cacheOpts)}`;
|
||||
|
||||
// 查询缓存数据
|
||||
// TODO: 优化缓存失效因素
|
||||
if (useCache) {
|
||||
const [v1, v2] = process.env.REACT_APP_VERSION.split(".");
|
||||
const cacheOpts = {
|
||||
translator,
|
||||
text,
|
||||
fromLang,
|
||||
toLang,
|
||||
userPrompt: apiSetting.userPrompt, // prompt改变,缓存失效
|
||||
model: apiSetting.model, // model改变,缓存失效
|
||||
version: [v1, v2].join("."),
|
||||
};
|
||||
cacheInput = `${URL_CACHE_TRAN}?${queryString.stringify(cacheOpts)}`;
|
||||
resCache = await getHttpCachePolyfill(cacheInput);
|
||||
const cache = (await getHttpCachePolyfill(cacheInput)) || {};
|
||||
if (cache.trText) {
|
||||
return [cache.trText, cache.isSame];
|
||||
}
|
||||
}
|
||||
|
||||
// 请求接口数据
|
||||
if (!resCache) {
|
||||
const [input, init] = await genTransReq(translator, {
|
||||
text,
|
||||
let trText = "";
|
||||
let srLang = "";
|
||||
if (apiSetting.isBatchFetch && OPT_TRANS_BATCH.has(translator)) {
|
||||
const queue = getBatchQueue(
|
||||
{ translator, from, to, docInfo, apiSetting, usePool },
|
||||
apiSetting
|
||||
);
|
||||
const tranlation = await queue.addTask({ text });
|
||||
if (Array.isArray(tranlation)) {
|
||||
[trText, srLang = ""] = tranlation;
|
||||
}
|
||||
} else {
|
||||
const translations = await fetchTranslate({
|
||||
translator,
|
||||
texts: [text],
|
||||
from,
|
||||
to,
|
||||
...apiSetting,
|
||||
});
|
||||
res = await fetchData(input, init, {
|
||||
useCache: false,
|
||||
docInfo,
|
||||
apiSetting,
|
||||
usePool,
|
||||
fetchInterval: apiSetting.fetchInterval,
|
||||
fetchLimit: apiSetting.fetchLimit,
|
||||
httpTimeout: apiSetting.httpTimeout,
|
||||
});
|
||||
} else {
|
||||
res = resCache;
|
||||
if (Array.isArray(translations?.[0])) {
|
||||
[trText, srLang = ""] = translations[0];
|
||||
}
|
||||
}
|
||||
|
||||
if (!res) {
|
||||
return ["", false];
|
||||
}
|
||||
|
||||
// 解析返回数据
|
||||
const [trText, isSame] = parseTransRes(translator, res, apiSetting, {
|
||||
text,
|
||||
from,
|
||||
to,
|
||||
});
|
||||
const isSame = srLang && (to.includes(srLang) || srLang.includes(to));
|
||||
|
||||
// 插入缓存
|
||||
if (useCache && !resCache && trText) {
|
||||
await putHttpCachePolyfill(cacheInput, null, res);
|
||||
if (useCache && trText) {
|
||||
await putHttpCachePolyfill(cacheInput, null, { trText, isSame, srLang });
|
||||
}
|
||||
|
||||
return [trText, isSame, res];
|
||||
return [trText, isSame];
|
||||
};
|
||||
|
||||
@@ -26,7 +26,6 @@ import {
|
||||
OPT_TRANS_CUSTOMIZE_3,
|
||||
OPT_TRANS_CUSTOMIZE_4,
|
||||
OPT_TRANS_CUSTOMIZE_5,
|
||||
INPUT_PLACE_URL,
|
||||
INPUT_PLACE_FROM,
|
||||
INPUT_PLACE_TO,
|
||||
INPUT_PLACE_TEXT,
|
||||
@@ -37,7 +36,9 @@ import { msAuth } from "../libs/auth";
|
||||
import { genDeeplFree } from "./deepl";
|
||||
import { genBaidu } from "./baidu";
|
||||
import interpreter from "../libs/interpreter";
|
||||
import { parseJsonObj } from "../libs/utils";
|
||||
import { parseJsonObj, extractJson } from "../libs/utils";
|
||||
import { kissLog } from "../libs/log";
|
||||
import { fetchData } from "../libs/fetch";
|
||||
|
||||
const keyMap = new Map();
|
||||
const urlMap = new Map();
|
||||
@@ -60,7 +61,48 @@ const keyPick = (translator, key = "", cacheMap) => {
|
||||
return keys[curIndex];
|
||||
};
|
||||
|
||||
const genGoogle = ({ text, from, to, url, key }) => {
|
||||
const genSystemPrompt = ({ systemPrompt, from, to }) =>
|
||||
systemPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to);
|
||||
|
||||
const genUserPrompt = ({ userPrompt, from, to, texts, docInfo }) => {
|
||||
const prompt = JSON.stringify({
|
||||
targetLanguage: to,
|
||||
title: docInfo.title,
|
||||
description: docInfo.description,
|
||||
segments: texts.map((text, i) => ({ id: i, text })),
|
||||
});
|
||||
|
||||
if (userPrompt.includes(INPUT_PLACE_TEXT)) {
|
||||
return userPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, prompt);
|
||||
}
|
||||
|
||||
return prompt;
|
||||
};
|
||||
|
||||
const parseTranslations = (raw) => {
|
||||
let data;
|
||||
|
||||
try {
|
||||
const jsonString = extractJson(raw);
|
||||
data = JSON.parse(jsonString);
|
||||
} catch (err) {
|
||||
kissLog(err, "parseTranslations");
|
||||
data = { translations: [] };
|
||||
}
|
||||
|
||||
if (!Array.isArray(data.translations)) {
|
||||
data.translations = [];
|
||||
}
|
||||
|
||||
return data.translations.map((item) => [item.text]);
|
||||
};
|
||||
|
||||
const genGoogle = ({ texts, from, to, url, key }) => {
|
||||
const params = {
|
||||
client: "gtx",
|
||||
dt: "t",
|
||||
@@ -68,7 +110,7 @@ const genGoogle = ({ text, from, to, url, key }) => {
|
||||
ie: "UTF-8",
|
||||
sl: from,
|
||||
tl: to,
|
||||
q: text,
|
||||
q: texts.join(" "),
|
||||
};
|
||||
const input = `${url}?${queryString.stringify(params)}`;
|
||||
const init = {
|
||||
@@ -83,8 +125,8 @@ const genGoogle = ({ text, from, to, url, key }) => {
|
||||
return [input, init];
|
||||
};
|
||||
|
||||
const genGoogle2 = ({ text, from, to, url, key }) => {
|
||||
const body = JSON.stringify([[[text], from, to], "wt_lib"]);
|
||||
const genGoogle2 = ({ texts, from, to, url, key }) => {
|
||||
const body = JSON.stringify([[texts, from, to], "wt_lib"]);
|
||||
const init = {
|
||||
method: "POST",
|
||||
headers: {
|
||||
@@ -97,7 +139,7 @@ const genGoogle2 = ({ text, from, to, url, key }) => {
|
||||
return [url, init];
|
||||
};
|
||||
|
||||
const genMicrosoft = async ({ text, from, to }) => {
|
||||
const genMicrosoft = async ({ texts, from, to }) => {
|
||||
const [token] = await msAuth();
|
||||
const params = {
|
||||
from,
|
||||
@@ -111,15 +153,15 @@ const genMicrosoft = async ({ text, from, to }) => {
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
method: "POST",
|
||||
body: JSON.stringify([{ Text: text }]),
|
||||
body: JSON.stringify(texts.map((text) => ({ Text: text }))),
|
||||
};
|
||||
|
||||
return [input, init];
|
||||
};
|
||||
|
||||
const genDeepl = ({ text, from, to, url, key }) => {
|
||||
const genDeepl = ({ texts, from, to, url, key }) => {
|
||||
const data = {
|
||||
text: [text],
|
||||
text: texts,
|
||||
target_lang: to,
|
||||
source_lang: from,
|
||||
// split_sentences: "0",
|
||||
@@ -136,9 +178,9 @@ const genDeepl = ({ text, from, to, url, key }) => {
|
||||
return [url, init];
|
||||
};
|
||||
|
||||
const genDeeplX = ({ text, from, to, url, key }) => {
|
||||
const genDeeplX = ({ texts, from, to, url, key }) => {
|
||||
const data = {
|
||||
text,
|
||||
text: texts.join(" "),
|
||||
target_lang: to,
|
||||
source_lang: from,
|
||||
};
|
||||
@@ -157,12 +199,12 @@ const genDeeplX = ({ text, from, to, url, key }) => {
|
||||
return [url, init];
|
||||
};
|
||||
|
||||
const genNiuTrans = ({ text, from, to, url, key, dictNo, memoryNo }) => {
|
||||
const genNiuTrans = ({ texts, from, to, url, key, dictNo, memoryNo }) => {
|
||||
const data = {
|
||||
from,
|
||||
to,
|
||||
apikey: key,
|
||||
src_text: text,
|
||||
src_text: texts.join(" "),
|
||||
dictNo,
|
||||
memoryNo,
|
||||
};
|
||||
@@ -178,7 +220,7 @@ const genNiuTrans = ({ text, from, to, url, key, dictNo, memoryNo }) => {
|
||||
return [url, init];
|
||||
};
|
||||
|
||||
const genTencent = ({ text, from, to }) => {
|
||||
const genTencent = ({ texts, from, to }) => {
|
||||
const data = {
|
||||
header: {
|
||||
fn: "auto_translation",
|
||||
@@ -188,7 +230,7 @@ const genTencent = ({ text, from, to }) => {
|
||||
type: "plain",
|
||||
model_category: "normal",
|
||||
source: {
|
||||
text_list: [text],
|
||||
text_list: texts,
|
||||
lang: from,
|
||||
},
|
||||
target: {
|
||||
@@ -211,11 +253,11 @@ const genTencent = ({ text, from, to }) => {
|
||||
return [input, init];
|
||||
};
|
||||
|
||||
const genVolcengine = ({ text, from, to }) => {
|
||||
const genVolcengine = ({ texts, from, to }) => {
|
||||
const data = {
|
||||
source_language: from,
|
||||
target_language: to,
|
||||
text: text,
|
||||
text: texts.join(" "),
|
||||
};
|
||||
|
||||
const input = "https://translate.volcengine.com/crx/translate/v1";
|
||||
@@ -231,7 +273,7 @@ const genVolcengine = ({ text, from, to }) => {
|
||||
};
|
||||
|
||||
const genOpenAI = ({
|
||||
text,
|
||||
texts,
|
||||
from,
|
||||
to,
|
||||
url,
|
||||
@@ -243,21 +285,10 @@ const genOpenAI = ({
|
||||
maxTokens,
|
||||
customHeader,
|
||||
customBody,
|
||||
docInfo,
|
||||
}) => {
|
||||
// 兼容历史上作为systemPrompt的prompt,如果prompt中不包含带翻译文本,则添加文本到prompt末尾
|
||||
// if (!prompt.includes(INPUT_PLACE_TEXT)) {
|
||||
// prompt += `\nSource Text: ${INPUT_PLACE_TEXT}`;
|
||||
// }
|
||||
systemPrompt = systemPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
userPrompt = userPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
|
||||
// TODO: 同时支持json对象和hook函数
|
||||
systemPrompt = genSystemPrompt({ systemPrompt, from, to });
|
||||
userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
|
||||
customHeader = parseJsonObj(customHeader);
|
||||
customBody = parseJsonObj(customBody);
|
||||
|
||||
@@ -293,7 +324,7 @@ const genOpenAI = ({
|
||||
};
|
||||
|
||||
const genGemini = ({
|
||||
text,
|
||||
texts,
|
||||
from,
|
||||
to,
|
||||
url,
|
||||
@@ -305,19 +336,13 @@ const genGemini = ({
|
||||
maxTokens,
|
||||
customHeader,
|
||||
customBody,
|
||||
docInfo,
|
||||
}) => {
|
||||
url = url
|
||||
.replaceAll(INPUT_PLACE_MODEL, model)
|
||||
.replaceAll(INPUT_PLACE_KEY, key);
|
||||
systemPrompt = systemPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
userPrompt = userPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
|
||||
systemPrompt = genSystemPrompt({ systemPrompt, from, to });
|
||||
userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
|
||||
customHeader = parseJsonObj(customHeader);
|
||||
customBody = parseJsonObj(customBody);
|
||||
|
||||
@@ -355,7 +380,7 @@ const genGemini = ({
|
||||
};
|
||||
|
||||
const genGemini2 = ({
|
||||
text,
|
||||
texts,
|
||||
from,
|
||||
to,
|
||||
url,
|
||||
@@ -367,16 +392,10 @@ const genGemini2 = ({
|
||||
maxTokens,
|
||||
customHeader,
|
||||
customBody,
|
||||
docInfo,
|
||||
}) => {
|
||||
systemPrompt = systemPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
userPrompt = userPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
|
||||
systemPrompt = genSystemPrompt({ systemPrompt, from, to });
|
||||
userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
|
||||
customHeader = parseJsonObj(customHeader);
|
||||
customBody = parseJsonObj(customBody);
|
||||
|
||||
@@ -411,7 +430,7 @@ const genGemini2 = ({
|
||||
};
|
||||
|
||||
const genClaude = ({
|
||||
text,
|
||||
texts,
|
||||
from,
|
||||
to,
|
||||
url,
|
||||
@@ -423,16 +442,10 @@ const genClaude = ({
|
||||
maxTokens,
|
||||
customHeader,
|
||||
customBody,
|
||||
docInfo,
|
||||
}) => {
|
||||
systemPrompt = systemPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
userPrompt = userPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
|
||||
systemPrompt = genSystemPrompt({ systemPrompt, from, to });
|
||||
userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
|
||||
customHeader = parseJsonObj(customHeader);
|
||||
customBody = parseJsonObj(customBody);
|
||||
|
||||
@@ -466,7 +479,7 @@ const genClaude = ({
|
||||
};
|
||||
|
||||
const genOpenRouter = ({
|
||||
text,
|
||||
texts,
|
||||
from,
|
||||
to,
|
||||
url,
|
||||
@@ -478,16 +491,10 @@ const genOpenRouter = ({
|
||||
maxTokens,
|
||||
customHeader,
|
||||
customBody,
|
||||
docInfo,
|
||||
}) => {
|
||||
systemPrompt = systemPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
userPrompt = userPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
|
||||
systemPrompt = genSystemPrompt({ systemPrompt, from, to });
|
||||
userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
|
||||
customHeader = parseJsonObj(customHeader);
|
||||
customBody = parseJsonObj(customBody);
|
||||
|
||||
@@ -522,7 +529,7 @@ const genOpenRouter = ({
|
||||
};
|
||||
|
||||
const genOllama = ({
|
||||
text,
|
||||
texts,
|
||||
from,
|
||||
to,
|
||||
think,
|
||||
@@ -533,16 +540,10 @@ const genOllama = ({
|
||||
model,
|
||||
customHeader,
|
||||
customBody,
|
||||
docInfo,
|
||||
}) => {
|
||||
systemPrompt = systemPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
userPrompt = userPrompt
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text);
|
||||
|
||||
systemPrompt = genSystemPrompt({ systemPrompt, from, to });
|
||||
userPrompt = genUserPrompt({ userPrompt, from, to, texts, docInfo });
|
||||
customHeader = parseJsonObj(customHeader);
|
||||
customBody = parseJsonObj(customBody);
|
||||
|
||||
@@ -570,9 +571,9 @@ const genOllama = ({
|
||||
return [url, init];
|
||||
};
|
||||
|
||||
const genCloudflareAI = ({ text, from, to, url, key }) => {
|
||||
const genCloudflareAI = ({ texts, from, to, url, key }) => {
|
||||
const data = {
|
||||
text,
|
||||
text: texts.join(" "),
|
||||
source_lang: from,
|
||||
target_lang: to,
|
||||
};
|
||||
@@ -589,36 +590,21 @@ const genCloudflareAI = ({ text, from, to, url, key }) => {
|
||||
return [url, init];
|
||||
};
|
||||
|
||||
const genCustom = ({ text, from, to, url, key, reqHook }) => {
|
||||
url = url
|
||||
.replaceAll(INPUT_PLACE_URL, url)
|
||||
.replaceAll(INPUT_PLACE_FROM, from)
|
||||
.replaceAll(INPUT_PLACE_TO, to)
|
||||
.replaceAll(INPUT_PLACE_TEXT, text)
|
||||
.replaceAll(INPUT_PLACE_KEY, key);
|
||||
let init = {};
|
||||
|
||||
const genCustom = ({ texts, from, to, url, key, reqHook, docInfo }) => {
|
||||
if (reqHook?.trim()) {
|
||||
interpreter.run(`exports.reqHook = ${reqHook}`);
|
||||
[url, init] = interpreter.exports.reqHook(text, from, to, url, key);
|
||||
return [url, init];
|
||||
return interpreter.exports.reqHook({ texts, from, to, url, key, docInfo });
|
||||
}
|
||||
|
||||
const data = {
|
||||
text,
|
||||
from,
|
||||
to,
|
||||
};
|
||||
init = {
|
||||
const data = { texts, from, to };
|
||||
const init = {
|
||||
headers: {
|
||||
"Content-type": "application/json",
|
||||
Authorization: `Bearer ${key}`,
|
||||
},
|
||||
method: "POST",
|
||||
body: JSON.stringify(data),
|
||||
};
|
||||
if (key) {
|
||||
init.headers.Authorization = `Bearer ${key}`;
|
||||
}
|
||||
|
||||
return [url, init];
|
||||
};
|
||||
@@ -710,123 +696,138 @@ export const genTransReq = (translator, args) => {
|
||||
* 解析翻译接口返回数据
|
||||
* @param {*} translator
|
||||
* @param {*} res
|
||||
* @param {*} apiSetting
|
||||
* @param {*} param3
|
||||
* @returns
|
||||
*/
|
||||
export const parseTransRes = (
|
||||
translator,
|
||||
res,
|
||||
apiSetting,
|
||||
{ text, from, to }
|
||||
{ texts, from, to, resHook }
|
||||
) => {
|
||||
let trText = ""; // 返回的译文
|
||||
let isSame = false; // 译文与原文语言是否相同
|
||||
|
||||
switch (translator) {
|
||||
case OPT_TRANS_GOOGLE:
|
||||
trText = res.sentences.map((item) => item.trans).join(" ");
|
||||
isSame = to === res.src;
|
||||
break;
|
||||
return [[res?.sentences?.map((item) => item.trans).join(" "), res?.src]];
|
||||
case OPT_TRANS_GOOGLE_2:
|
||||
trText = res?.[0]?.[0] || "";
|
||||
isSame = to === res.src;
|
||||
break;
|
||||
return res?.[0]?.map((_, i) => [res?.[0]?.[i], res?.[1]?.[i]]);
|
||||
case OPT_TRANS_MICROSOFT:
|
||||
trText = res
|
||||
.map((item) => item.translations.map((item) => item.text).join(" "))
|
||||
.join(" ");
|
||||
isSame = text === trText;
|
||||
break;
|
||||
return res?.map((item) => [
|
||||
item.translations.map((item) => item.text).join(" "),
|
||||
item.detectedLanguage.language,
|
||||
]);
|
||||
case OPT_TRANS_DEEPL:
|
||||
trText = res.translations.map((item) => item.text).join(" ");
|
||||
isSame = to === res.translations[0].detected_source_language;
|
||||
break;
|
||||
return res?.translations?.map((item) => [
|
||||
item.text,
|
||||
item.detected_source_language,
|
||||
]);
|
||||
case OPT_TRANS_DEEPLFREE:
|
||||
trText = res.result?.texts.map((item) => item.text).join(" ");
|
||||
isSame = to === res.result?.lang;
|
||||
break;
|
||||
return [
|
||||
[
|
||||
res?.result?.texts?.map((item) => item.text).join(" "),
|
||||
res?.result?.lang,
|
||||
],
|
||||
];
|
||||
case OPT_TRANS_DEEPLX:
|
||||
trText = res.data;
|
||||
isSame = to === res.source_lang;
|
||||
break;
|
||||
return [[res?.data, res?.source_lang]];
|
||||
case OPT_TRANS_NIUTRANS:
|
||||
const json = JSON.parse(res);
|
||||
if (json.error_msg) {
|
||||
throw new Error(json.error_msg);
|
||||
}
|
||||
trText = json.tgt_text;
|
||||
isSame = to === json.from;
|
||||
break;
|
||||
return [[json.tgt_text, json.from]];
|
||||
case OPT_TRANS_BAIDU:
|
||||
// trText = res.trans_result?.data.map((item) => item.dst).join(" ");
|
||||
// isSame = res.trans_result?.to === res.trans_result?.from;
|
||||
if (res.type === 1) {
|
||||
trText = Object.keys(JSON.parse(res.result).content[0].mean[0].cont)[0];
|
||||
isSame = to === res.from;
|
||||
return [
|
||||
[
|
||||
Object.keys(JSON.parse(res.result).content[0].mean[0].cont)[0],
|
||||
res.from,
|
||||
],
|
||||
];
|
||||
} else if (res.type === 2) {
|
||||
trText = res.data.map((item) => item.dst).join(" ");
|
||||
isSame = to === res.from;
|
||||
return [[res.data.map((item) => item.dst).join(" "), res.from]];
|
||||
}
|
||||
break;
|
||||
case OPT_TRANS_TENCENT:
|
||||
trText = res?.auto_translation?.[0];
|
||||
isSame = text === trText;
|
||||
break;
|
||||
return res?.auto_translation?.map((text) => [text, res?.src_lang]);
|
||||
case OPT_TRANS_VOLCENGINE:
|
||||
trText = res?.translation || "";
|
||||
isSame = to === res?.detected_language;
|
||||
break;
|
||||
return new Map([[0, [res?.translation, res?.detected_language]]]);
|
||||
case OPT_TRANS_OPENAI:
|
||||
case OPT_TRANS_OPENAI_2:
|
||||
case OPT_TRANS_OPENAI_3:
|
||||
case OPT_TRANS_GEMINI_2:
|
||||
case OPT_TRANS_OPENROUTER:
|
||||
trText = res?.choices?.map((item) => item.message.content).join(" ");
|
||||
isSame = text === trText;
|
||||
break;
|
||||
return parseTranslations(res?.choices?.[0]?.message?.content ?? "");
|
||||
case OPT_TRANS_GEMINI:
|
||||
trText = res?.candidates
|
||||
?.map((item) => item.content?.parts.map((item) => item.text).join(" "))
|
||||
.join(" ");
|
||||
isSame = text === trText;
|
||||
break;
|
||||
return parseTranslations(
|
||||
res?.candidates?.[0]?.content?.parts?.[0]?.text ?? ""
|
||||
);
|
||||
case OPT_TRANS_CLAUDE:
|
||||
trText = res?.content?.map((item) => item.text).join(" ");
|
||||
isSame = text === trText;
|
||||
break;
|
||||
return parseTranslations(res?.content?.[0]?.text ?? "");
|
||||
case OPT_TRANS_CLOUDFLAREAI:
|
||||
trText = res?.result?.translated_text;
|
||||
isSame = text === trText;
|
||||
break;
|
||||
return [[res?.result?.translated_text]];
|
||||
case OPT_TRANS_OLLAMA:
|
||||
case OPT_TRANS_OLLAMA_2:
|
||||
case OPT_TRANS_OLLAMA_3:
|
||||
const { thinkIgnore = "" } = apiSetting;
|
||||
const deepModels = thinkIgnore.split(",").filter((model) => model.trim());
|
||||
if (deepModels.some((model) => res?.model?.startsWith(model))) {
|
||||
trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, "");
|
||||
} else {
|
||||
trText = res?.response;
|
||||
}
|
||||
isSame = text === trText;
|
||||
break;
|
||||
// const deepModels = thinkIgnore.split(",").filter((model) => model.trim());
|
||||
// if (deepModels.some((model) => res?.model?.startsWith(model))) {
|
||||
// trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, "");
|
||||
// } else {
|
||||
// trText = res?.response;
|
||||
// }
|
||||
return parseTranslations(res?.response ?? "");
|
||||
case OPT_TRANS_CUSTOMIZE:
|
||||
case OPT_TRANS_CUSTOMIZE_2:
|
||||
case OPT_TRANS_CUSTOMIZE_3:
|
||||
case OPT_TRANS_CUSTOMIZE_4:
|
||||
case OPT_TRANS_CUSTOMIZE_5:
|
||||
const { resHook } = apiSetting;
|
||||
if (resHook?.trim()) {
|
||||
interpreter.run(`exports.resHook = ${resHook}`);
|
||||
[trText, isSame] = interpreter.exports.resHook(res, text, from, to);
|
||||
return interpreter.exports.resHook({ res, texts, from, to });
|
||||
} else {
|
||||
trText = res.text;
|
||||
isSame = to === res.from;
|
||||
return res?.map((item) => [item.text, item.src]);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
}
|
||||
|
||||
return [trText, isSame];
|
||||
return [];
|
||||
};
|
||||
|
||||
/**
|
||||
* 发送翻译请求并解析
|
||||
* @param {*} param0
|
||||
* @returns
|
||||
*/
|
||||
export const fetchTranslate = async ({
|
||||
translator,
|
||||
texts,
|
||||
from,
|
||||
to,
|
||||
docInfo,
|
||||
apiSetting,
|
||||
usePool,
|
||||
}) => {
|
||||
const [input, init] = await genTransReq(translator, {
|
||||
texts,
|
||||
from,
|
||||
to,
|
||||
docInfo,
|
||||
...apiSetting,
|
||||
});
|
||||
|
||||
const res = await fetchData(input, init, {
|
||||
useCache: false,
|
||||
usePool,
|
||||
fetchInterval: apiSetting.fetchInterval,
|
||||
fetchLimit: apiSetting.fetchLimit,
|
||||
httpTimeout: apiSetting.httpTimeout,
|
||||
});
|
||||
if (!res) {
|
||||
throw new Error("tranlate got empty response");
|
||||
}
|
||||
|
||||
return parseTransRes(translator, res, {
|
||||
texts,
|
||||
from,
|
||||
to,
|
||||
...apiSetting,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
export const DEFAULT_HTTP_TIMEOUT = 10000; // 调用超时时间
|
||||
export const DEFAULT_FETCH_LIMIT = 10; // 默认最大任务数量
|
||||
export const DEFAULT_FETCH_INTERVAL = 100; // 默认任务间隔时间
|
||||
export const DEFAULT_BATCH_INTERVAL = 1000; // 批处理请求间隔时间
|
||||
export const DEFAULT_BATCH_SIZE = 10; // 每次最多发送段落数量
|
||||
export const DEFAULT_BATCH_LENGTH = 10000; // 每次发送最大文字数量
|
||||
|
||||
export const INPUT_PLACE_URL = "{{url}}"; // 占位符
|
||||
export const INPUT_PLACE_FROM = "{{from}}"; // 占位符
|
||||
@@ -66,6 +69,28 @@ export const OPT_TRANS_ALL = [
|
||||
OPT_TRANS_CUSTOMIZE_5,
|
||||
];
|
||||
|
||||
export const OPT_TRANS_BATCH = new Set([
|
||||
OPT_TRANS_GOOGLE_2,
|
||||
OPT_TRANS_MICROSOFT,
|
||||
OPT_TRANS_TENCENT,
|
||||
OPT_TRANS_DEEPL,
|
||||
OPT_TRANS_OPENAI,
|
||||
OPT_TRANS_OPENAI_2,
|
||||
OPT_TRANS_OPENAI_3,
|
||||
OPT_TRANS_GEMINI,
|
||||
OPT_TRANS_GEMINI_2,
|
||||
OPT_TRANS_CLAUDE,
|
||||
OPT_TRANS_OLLAMA,
|
||||
OPT_TRANS_OLLAMA_2,
|
||||
OPT_TRANS_OLLAMA_3,
|
||||
OPT_TRANS_OPENROUTER,
|
||||
OPT_TRANS_CUSTOMIZE,
|
||||
OPT_TRANS_CUSTOMIZE_2,
|
||||
OPT_TRANS_CUSTOMIZE_3,
|
||||
OPT_TRANS_CUSTOMIZE_4,
|
||||
OPT_TRANS_CUSTOMIZE_5,
|
||||
]);
|
||||
|
||||
export const OPT_LANGDETECTOR_ALL = [
|
||||
OPT_TRANS_GOOGLE,
|
||||
OPT_TRANS_MICROSOFT,
|
||||
@@ -247,23 +272,18 @@ export const OPT_LANGS_SPECIAL = {
|
||||
]),
|
||||
[OPT_TRANS_CUSTOMIZE]: new Map([
|
||||
...OPT_LANGS_FROM.map(([key]) => [key, key]),
|
||||
["auto", ""],
|
||||
]),
|
||||
[OPT_TRANS_CUSTOMIZE_2]: new Map([
|
||||
...OPT_LANGS_FROM.map(([key]) => [key, key]),
|
||||
["auto", ""],
|
||||
]),
|
||||
[OPT_TRANS_CUSTOMIZE_3]: new Map([
|
||||
...OPT_LANGS_FROM.map(([key]) => [key, key]),
|
||||
["auto", ""],
|
||||
]),
|
||||
[OPT_TRANS_CUSTOMIZE_4]: new Map([
|
||||
...OPT_LANGS_FROM.map(([key]) => [key, key]),
|
||||
["auto", ""],
|
||||
]),
|
||||
[OPT_TRANS_CUSTOMIZE_5]: new Map([
|
||||
...OPT_LANGS_FROM.map(([key]) => [key, key]),
|
||||
["auto", ""],
|
||||
]),
|
||||
};
|
||||
export const OPT_LANGS_LIST = OPT_LANGS_TO.map(([lang]) => lang);
|
||||
@@ -294,8 +314,30 @@ const defaultApi = {
|
||||
url: "",
|
||||
key: "",
|
||||
model: "", // 模型名称
|
||||
systemPrompt: `You are a professional, authentic machine translation engine.`,
|
||||
userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
|
||||
systemPrompt: `You are a translation API.
|
||||
|
||||
Output:
|
||||
- Return one raw JSON object only.
|
||||
- Start with "{" and end with "}".
|
||||
- No fences or extra text.
|
||||
|
||||
Input JSON:
|
||||
{"targetLanguage":"<lang>","title":"<title>","description":"<desc>","segments":[{"id":1,"text":"..."}]}
|
||||
|
||||
Output JSON:
|
||||
{"translations":[{"id":1,"text":"...","sourceLanguage":"<detected-language>"}]}
|
||||
|
||||
Rules:
|
||||
1. Use title/description as context only, do not output them.
|
||||
2. Keep ids/order/count.
|
||||
3. Translate inner text only, not HTML tags.
|
||||
4. Do not translate <code>, <pre>, backticks, or terms like React, Docker, JavaScript, API.
|
||||
5. Preserve whitespace & entities.
|
||||
6. Automatically detect the source language of each segment and add it in the "sourceLanguage" field.
|
||||
7. Empty/unchanged input → unchanged.
|
||||
|
||||
Fail-safe: {"translations":[]}`,
|
||||
userPrompt: `${INPUT_PLACE_TEXT}`,
|
||||
customHeader: "",
|
||||
customBody: "",
|
||||
reqHook: "", // request 钩子函数
|
||||
@@ -303,28 +345,31 @@ const defaultApi = {
|
||||
fetchLimit: DEFAULT_FETCH_LIMIT, // 最大请求数量
|
||||
fetchInterval: DEFAULT_FETCH_INTERVAL, // 请求间隔时间
|
||||
httpTimeout: DEFAULT_HTTP_TIMEOUT, // 请求超时时间
|
||||
batchInterval: DEFAULT_BATCH_INTERVAL, // 批处理请求间隔时间
|
||||
batchSize: DEFAULT_BATCH_SIZE, // 每次最多发送段落数量
|
||||
batchLength: DEFAULT_BATCH_LENGTH, // 每次发送最大文字数量
|
||||
isBatchFetch: false, // 是否启用聚合发送请求
|
||||
isRichText: false, // 是否启用富文本翻译
|
||||
isContext: false, // 是否启用智能上下文
|
||||
temperature: 0,
|
||||
maxTokens: 2048,
|
||||
maxTokens: 20480,
|
||||
think: false,
|
||||
thinkIgnore: "qwen3,deepseek-r1",
|
||||
isDisabled: false, // 是否不显示
|
||||
};
|
||||
const defaultCustomApi = {
|
||||
...defaultApi,
|
||||
url: "https://translate.googleapis.com/translate_a/single?client=gtx&dj=1&dt=t&ie=UTF-8&q={{text}}&sl=en&tl=zh-CN",
|
||||
reqHook: `// Request Hook
|
||||
(text, from, to, url, key) => [
|
||||
url,
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: \`Bearer \${key}\`,
|
||||
},
|
||||
method: "GET",
|
||||
body: JSON.stringify({ text, from, to }),
|
||||
(text, from, to, url, key) => [url, {
|
||||
headers: {
|
||||
"Content-type": "application/json",
|
||||
},
|
||||
];`,
|
||||
method: "GET",
|
||||
body: null,
|
||||
}]`,
|
||||
resHook: `// Response Hook
|
||||
(res, text, from, to) => [res.text, to === res.src];`,
|
||||
(res, text, from, to) => [res.sentences.map((item) => item.trans).join(" "), to === res.src]`,
|
||||
};
|
||||
const defaultOpenaiApi = {
|
||||
...defaultApi,
|
||||
@@ -350,11 +395,13 @@ export const DEFAULT_TRANS_APIS = {
|
||||
apiName: OPT_TRANS_GOOGLE_2,
|
||||
url: "https://translate-pa.googleapis.com/v1/translateHtml",
|
||||
key: "AIzaSyATBXajvzQLTDHEQbcpq0Ihe0vWDHmO520",
|
||||
isBatchFetch: true,
|
||||
},
|
||||
[OPT_TRANS_MICROSOFT]: {
|
||||
...defaultApi,
|
||||
apiSlug: OPT_TRANS_MICROSOFT,
|
||||
apiName: OPT_TRANS_MICROSOFT,
|
||||
isBatchFetch: true,
|
||||
},
|
||||
[OPT_TRANS_BAIDU]: {
|
||||
...defaultApi,
|
||||
@@ -365,6 +412,7 @@ export const DEFAULT_TRANS_APIS = {
|
||||
...defaultApi,
|
||||
apiSlug: OPT_TRANS_TENCENT,
|
||||
apiName: OPT_TRANS_TENCENT,
|
||||
isBatchFetch: true,
|
||||
},
|
||||
[OPT_TRANS_VOLCENGINE]: {
|
||||
...defaultApi,
|
||||
@@ -376,7 +424,7 @@ export const DEFAULT_TRANS_APIS = {
|
||||
apiSlug: OPT_TRANS_DEEPL,
|
||||
apiName: OPT_TRANS_DEEPL,
|
||||
url: "https://api-free.deepl.com/v2/translate",
|
||||
fetchLimit: 1,
|
||||
isBatchFetch: true,
|
||||
},
|
||||
[OPT_TRANS_DEEPLFREE]: {
|
||||
...defaultApi,
|
||||
|
||||
@@ -1228,4 +1228,24 @@ export const I18N = {
|
||||
en: `If translate selected`,
|
||||
zh_TW: `是否啟用劃詞翻譯`,
|
||||
},
|
||||
is_batch_fetch: {
|
||||
zh: `是否聚合发送翻译请求`,
|
||||
en: `Whether to aggregate and send translation requests`,
|
||||
zh_TW: `是否聚合發送翻譯請求`,
|
||||
},
|
||||
batch_interval: {
|
||||
zh: `聚合请求等待时间(100-5000)`,
|
||||
en: `Aggregation request waiting time (100-5000)`,
|
||||
zh_TW: `聚合請求等待時間(100-5000)`,
|
||||
},
|
||||
batch_size: {
|
||||
zh: `聚合请求最大段落数(1-100)`,
|
||||
en: `Maximum number of paragraphs in an aggregation request (1-100)`,
|
||||
zh_TW: `聚合請求最大段落數(1-100)`,
|
||||
},
|
||||
batch_length: {
|
||||
zh: `聚合请求最大文本长度(500-50000)`,
|
||||
en: `Maximum text length for aggregation requests (500-50000)`,
|
||||
zh_TW: `聚合請求最大文字長度(500-50000)`,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -12,7 +12,7 @@ import { kissLog } from "../libs/log";
|
||||
* @param {*} setting
|
||||
* @returns
|
||||
*/
|
||||
export function useTranslate(q, rule, setting) {
|
||||
export function useTranslate(q, rule, setting, docInfo) {
|
||||
const [text, setText] = useState("");
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [sameLang, setSamelang] = useState(false);
|
||||
@@ -48,6 +48,7 @@ export function useTranslate(q, rule, setting) {
|
||||
...DEFAULT_TRANS_APIS[translator],
|
||||
...(setting.transApis[translator] || {}),
|
||||
},
|
||||
docInfo,
|
||||
});
|
||||
setText(trText);
|
||||
setSamelang(isSame);
|
||||
@@ -58,7 +59,16 @@ export function useTranslate(q, rule, setting) {
|
||||
setLoading(false);
|
||||
}
|
||||
})();
|
||||
}, [q, translator, fromLang, toLang, detectRemote, skipLangs, setting]);
|
||||
}, [
|
||||
q,
|
||||
translator,
|
||||
fromLang,
|
||||
toLang,
|
||||
detectRemote,
|
||||
skipLangs,
|
||||
setting,
|
||||
docInfo,
|
||||
]);
|
||||
|
||||
return { text, sameLang, loading };
|
||||
}
|
||||
|
||||
153
src/libs/batchQueue.js
Normal file
153
src/libs/batchQueue.js
Normal file
@@ -0,0 +1,153 @@
|
||||
import { fetchTranslate } from "../apis/trans";
|
||||
|
||||
/**
 * Batching queue factory.
 *
 * Collects individual translation tasks and flushes them to
 * `fetchTranslate` in aggregated batches, bounded by paragraph count
 * (`batchSize`) and total text length (`batchLength`). A flush happens
 * either when enough tasks accumulate or after `batchInterval` ms.
 *
 * @param {*} param0 - request context forwarded verbatim to fetchTranslate
 *                     (translator, from, to, docInfo, apiSetting, usePool)
 * @param {*} param1 - batching knobs: batchInterval (ms), batchSize
 *                     (max paragraphs per request), batchLength (max
 *                     combined text length per request)
 * @returns {{ addTask: Function, destroy: Function }}
 */
const batchQueue = (
  { translator, from, to, docInfo, apiSetting, usePool },
  { batchInterval = 1000, batchSize = 10, batchLength = 10000 } = {}
) => {
  // Pending tasks: { payload, resolve, reject }.
  const queue = [];
  // True while a batch request is in flight; prevents concurrent flushes.
  let isProcessing = false;
  // Pending setTimeout handle for the interval-based flush, or null.
  let timer = null;

  // Send one aggregated request for the given payloads.
  // NOTE(review): assumes fetchTranslate returns an array of per-text
  // results aligned with `texts` — confirm against apis/trans.
  const sendBatchRequest = async (payloads) => {
    const texts = payloads.map((item) => item.text);
    return fetchTranslate({
      translator,
      texts,
      from,
      to,
      docInfo,
      apiSetting,
      usePool,
    });
  };

  // Flush as many queued tasks as fit within batchSize/batchLength,
  // settle their promises from the batched response, then reschedule
  // if anything is left over.
  const processQueue = async () => {
    // A flush is happening now; the interval timer is no longer needed.
    if (timer) {
      clearTimeout(timer);
      timer = null;
    }

    if (queue.length === 0 || isProcessing) {
      return;
    }

    isProcessing = true;

    let tasksToProcess = [];
    let currentBatchLength = 0;
    let endIndex = 0;

    // Walk the queue head and count how many tasks fit in this batch.
    // The `endIndex > 0` guard ensures a single oversized text still
    // gets sent alone rather than starving forever.
    for (const task of queue) {
      const textLength = task.payload.text?.length || 0;
      if (
        endIndex >= batchSize ||
        (currentBatchLength + textLength > batchLength && endIndex > 0)
      ) {
        break;
      }
      currentBatchLength += textLength;
      endIndex++;
    }

    if (endIndex > 0) {
      // Remove the batch from the shared queue before awaiting, so tasks
      // added while the request is in flight accumulate separately.
      tasksToProcess = queue.splice(0, endIndex);
    }

    if (tasksToProcess.length === 0) {
      isProcessing = false;
      return;
    }

    try {
      const payloads = tasksToProcess.map((item) => item.payload);
      const responses = await sendBatchRequest(payloads);

      // Settle each task with its positionally-matched response.
      tasksToProcess.forEach((taskItem, index) => {
        const response = responses[index];
        if (response) {
          taskItem.resolve(response);
        } else {
          taskItem.reject(new Error(`No response for item at index ${index}`));
        }
      });
    } catch (error) {
      // One failed request fails every task that rode in it.
      tasksToProcess.forEach((taskItem) => taskItem.reject(error));
    } finally {
      isProcessing = false;
      if (queue.length > 0) {
        if (queue.length >= batchSize) {
          // Another full batch is already waiting — flush asap.
          setTimeout(processQueue, 0);
        } else {
          // Partial batch — wait for the interval (or more tasks).
          scheduleProcessing();
        }
      }
    }
  };

  // Arm the interval-based flush if nothing is armed or running yet.
  const scheduleProcessing = () => {
    if (!isProcessing && !timer && queue.length > 0) {
      timer = setTimeout(processQueue, batchInterval);
    }
  };

  // Enqueue one payload; resolves with its individual translation result.
  const addTask = (data) => {
    return new Promise((resolve, reject) => {
      const payload = data;
      queue.push({ payload, resolve, reject });

      if (queue.length >= batchSize) {
        // Enough for a full batch — flush immediately.
        processQueue();
      } else {
        scheduleProcessing();
      }
    });
  };

  // Cancel the pending timer and reject every queued task.
  // NOTE(review): an in-flight batch (isProcessing) is not cancelled;
  // its tasks were already spliced out and will still settle.
  const destroy = () => {
    if (timer) {
      clearTimeout(timer);
      timer = null;
    }
    queue.forEach((task) =>
      task.reject(new Error("Queue instance was destroyed."))
    );
    queue.length = 0;
  };

  return { addTask, destroy };
};
|
||||
|
||||
// Cache of live queue instances, keyed by translator + language pair.
const queueMap = new Map();

/**
 * Get (or lazily create) the batch queue for a translator/language pair.
 *
 * Instances are cached per `${translator}_${from}_${to}` key, so every
 * caller translating the same direction shares one aggregation queue.
 *
 * @param {*} args - { translator, from, to, ... } forwarded to batchQueue
 * @param {*} opts - batching options (interval / size / length)
 * @returns {{ addTask: Function, destroy: Function }} cached queue instance
 */
export const getBatchQueue = (args, opts) => {
  const { translator, from, to } = args;
  const key = `${translator}_${from}_${to}`;

  const cached = queueMap.get(key);
  if (cached) {
    return cached;
  }

  const instance = batchQueue(args, opts);
  queueMap.set(key, instance);
  return instance;
};
|
||||
|
||||
/**
 * Destroy every cached batch queue (rejecting their pending tasks)
 * and drop the instances from the cache.
 */
export const clearAllBatchQueue = () => {
  // Bug fix: `entries()` yields [key, value] pairs, so the previous
  // `for (const queue of queueMap.entries())` invoked destroy() on an
  // array and threw a TypeError. Iterate the values instead.
  for (const queue of queueMap.values()) {
    queue.destroy();
  }
  // Drop the destroyed instances so the next getBatchQueue() call
  // builds fresh queues instead of reusing stale ones.
  queueMap.clear();
};
|
||||
@@ -24,6 +24,7 @@ import { isExt } from "./client";
|
||||
import { injectInlineJs, injectInternalCss } from "./injector";
|
||||
import { kissLog } from "./log";
|
||||
import interpreter from "./interpreter";
|
||||
import { clearAllBatchQueue } from "./batchQueue";
|
||||
|
||||
/**
|
||||
* 翻译类
|
||||
@@ -54,6 +55,7 @@ export class Translator {
|
||||
_keepSelector = "";
|
||||
_terms = [];
|
||||
_docTitle = "";
|
||||
_docDescription = "";
|
||||
|
||||
// 显示
|
||||
_interseObserver = new IntersectionObserver(
|
||||
@@ -95,6 +97,11 @@ export class Translator {
|
||||
});
|
||||
});
|
||||
|
||||
_getDocDescription = () => {
|
||||
const meta = document.querySelector('meta[name="description"]');
|
||||
return meta ? meta.getAttribute("content") : "";
|
||||
};
|
||||
|
||||
// 插入 shadowroot
|
||||
_overrideAttachShadow = () => {
|
||||
const _this = this;
|
||||
@@ -110,6 +117,8 @@ export class Translator {
|
||||
|
||||
this._setting = setting;
|
||||
this._rule = rule;
|
||||
this._docTitle = document.title;
|
||||
this._docDescription = this._getDocDescription();
|
||||
|
||||
this._keepSelector = rule.keepSelector || "";
|
||||
this._terms = (rule.terms || "")
|
||||
@@ -126,6 +135,13 @@ export class Translator {
|
||||
return this._setting;
|
||||
}
|
||||
|
||||
get docInfo() {
|
||||
return {
|
||||
title: this._docTitle,
|
||||
description: this._docDescription,
|
||||
};
|
||||
}
|
||||
|
||||
get eventName() {
|
||||
return this._eventName;
|
||||
}
|
||||
@@ -426,6 +442,7 @@ export class Translator {
|
||||
|
||||
// 清空任务池
|
||||
clearFetchPool();
|
||||
clearAllBatchQueue();
|
||||
};
|
||||
|
||||
_removeInjector = () => {
|
||||
|
||||
@@ -289,3 +289,16 @@ export const parseJsonObj = (str) => {
|
||||
|
||||
return {};
|
||||
};
|
||||
|
||||
/**
 * Extract a JSON object from raw model output.
 *
 * Strips an optional Markdown code fence (``` or ```json) from the ends
 * of the string, then returns the widest `{...}` span found. Falls back
 * to the literal string "{}" when the input is empty or contains no
 * braces.
 *
 * @param {*} raw - raw text, possibly fenced and padded with prose
 * @returns {string} the extracted JSON object text, or "{}"
 */
export const extractJson = (raw) => {
  if (!raw) {
    return "{}";
  }

  const stripped = raw
    .replace(/^\s*```(?:json)?\s*/i, "")
    .replace(/\s*```\s*$/i, "");
  const braces = stripped.match(/\{[\s\S]*\}/);
  if (braces) {
    return braces[0];
  }
  return "{}";
};
|
||||
|
||||
@@ -98,7 +98,12 @@ const StyledSpan = styled("span")`
|
||||
|
||||
export default function Content({ q, keeps, translator, $el }) {
|
||||
const [rule, setRule] = useState(translator.rule);
|
||||
const { text, sameLang, loading } = useTranslate(q, rule, translator.setting);
|
||||
const { text, sameLang, loading } = useTranslate(
|
||||
q,
|
||||
rule,
|
||||
translator.setting,
|
||||
translator.docInfo
|
||||
);
|
||||
const {
|
||||
transOpen,
|
||||
textStyle,
|
||||
|
||||
@@ -34,6 +34,10 @@ import {
|
||||
DEFAULT_FETCH_LIMIT,
|
||||
DEFAULT_FETCH_INTERVAL,
|
||||
DEFAULT_HTTP_TIMEOUT,
|
||||
OPT_TRANS_BATCH,
|
||||
DEFAULT_BATCH_INTERVAL,
|
||||
DEFAULT_BATCH_SIZE,
|
||||
DEFAULT_BATCH_LENGTH,
|
||||
} from "../../config";
|
||||
import { useState } from "react";
|
||||
import { useI18n } from "../../hooks/I18n";
|
||||
@@ -140,6 +144,10 @@ function ApiFields({ translator, api, updateApi, resetApi }) {
|
||||
maxTokens = 256,
|
||||
apiName = "",
|
||||
isDisabled = false,
|
||||
isBatchFetch = false,
|
||||
batchInterval = DEFAULT_BATCH_INTERVAL,
|
||||
batchSize = DEFAULT_BATCH_SIZE,
|
||||
batchLength = DEFAULT_BATCH_LENGTH,
|
||||
} = api;
|
||||
|
||||
const handleChange = (e) => {
|
||||
@@ -160,6 +168,15 @@ function ApiFields({ translator, api, updateApi, resetApi }) {
|
||||
case "maxTokens":
|
||||
value = limitNumber(value, 0, 2 ** 15);
|
||||
break;
|
||||
case "batchInterval":
|
||||
value = limitNumber(value, 100, 5000);
|
||||
break;
|
||||
case "batchSize":
|
||||
value = limitNumber(value, 1, 100);
|
||||
break;
|
||||
case "batchLength":
|
||||
value = limitNumber(value, 500, 50000);
|
||||
break;
|
||||
default:
|
||||
}
|
||||
updateApi({
|
||||
@@ -394,6 +411,50 @@ function ApiFields({ translator, api, updateApi, resetApi }) {
|
||||
</>
|
||||
)}
|
||||
|
||||
{OPT_TRANS_BATCH.has(translator) && (
|
||||
<>
|
||||
<TextField
|
||||
select
|
||||
size="small"
|
||||
name="isBatchFetch"
|
||||
value={isBatchFetch}
|
||||
label={i18n("is_batch_fetch")}
|
||||
onChange={handleChange}
|
||||
>
|
||||
<MenuItem value={false}>{i18n("disable")}</MenuItem>
|
||||
<MenuItem value={true}>{i18n("enable")}</MenuItem>
|
||||
</TextField>
|
||||
{isBatchFetch && (
|
||||
<>
|
||||
<TextField
|
||||
size="small"
|
||||
label={i18n("batch_interval")}
|
||||
type="number"
|
||||
name="batchInterval"
|
||||
value={batchInterval}
|
||||
onChange={handleChange}
|
||||
/>
|
||||
<TextField
|
||||
size="small"
|
||||
label={i18n("batch_size")}
|
||||
type="number"
|
||||
name="batchSize"
|
||||
value={batchSize}
|
||||
onChange={handleChange}
|
||||
/>
|
||||
<TextField
|
||||
size="small"
|
||||
label={i18n("batch_length")}
|
||||
type="number"
|
||||
name="batchLength"
|
||||
value={batchLength}
|
||||
onChange={handleChange}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
<TextField
|
||||
size="small"
|
||||
label={i18n("fetch_limit")}
|
||||
|
||||
@@ -71,6 +71,7 @@ export default function FavWords() {
|
||||
const tranList = [];
|
||||
for (const text of downloadList) {
|
||||
try {
|
||||
// todo
|
||||
const dictRes = await apiTranslate({
|
||||
text,
|
||||
translator: OPT_TRANS_BAIDU,
|
||||
|
||||
@@ -26,6 +26,7 @@ export default function DictCont({ text }) {
|
||||
return;
|
||||
}
|
||||
|
||||
// todo
|
||||
const dictRes = await apiTranslate({
|
||||
text,
|
||||
translator: OPT_TRANS_BAIDU,
|
||||
|
||||
Reference in New Issue
Block a user