refactor: Optimize data and cache request logic

Gabe
2025-08-31 23:37:29 +08:00
parent 4a5e6c2a23
commit c4fba1c905
10 changed files with 443 additions and 425 deletions
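At the API level, the refactor splits what used to be a single options bag: fetchData(input, init, opts) now takes a standard fetch init plus the KISS-specific options, and cache writes move out of the fetch layer into explicit calls at each call site. A minimal sketch of the new convention, assuming the module paths shown in the diffs below (the URL is a placeholder):

import { fetchData } from "./libs/fetch";
import { putHttpCachePolyfill } from "./libs/cache";

// Old shape (removed below): fetchData(input, { headers, useCache: true, ... })
// New shape: request init second, KISS options third.
const input = "https://example.com/detect"; // placeholder URL
const init = { headers: { "Content-type": "application/json" } };
const res = await fetchData(input, init, { useCache: true });

// Caching a response is now an explicit decision at the call site.
if (res) {
  await putHttpCachePolyfill(input, init, res);
}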

View File

@@ -1,49 +1,19 @@
import queryString from "query-string";
import { fetchData } from "../libs/fetch";
import {
OPT_TRANS_GOOGLE,
OPT_TRANS_GOOGLE_2,
OPT_TRANS_MICROSOFT,
OPT_TRANS_DEEPL,
OPT_TRANS_DEEPLFREE,
OPT_TRANS_DEEPLX,
OPT_TRANS_NIUTRANS,
OPT_TRANS_BAIDU,
OPT_TRANS_TENCENT,
OPT_TRANS_VOLCENGINE,
OPT_TRANS_OPENAI,
OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI,
OPT_TRANS_GEMINI_2,
OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA,
OPT_TRANS_OLLAMA_2,
OPT_TRANS_OLLAMA_3,
OPT_TRANS_OPENROUTER,
OPT_TRANS_CUSTOMIZE,
OPT_TRANS_CUSTOMIZE_2,
OPT_TRANS_CUSTOMIZE_3,
OPT_TRANS_CUSTOMIZE_4,
OPT_TRANS_CUSTOMIZE_5,
URL_CACHE_TRAN,
KV_SALT_SYNC,
URL_GOOGLE_TRAN,
URL_MICROSOFT_LANGDETECT,
URL_BAIDU_LANGDETECT,
URL_BAIDU_SUGGEST,
URL_BAIDU_TTS,
OPT_LANGS_BAIDU,
URL_TENCENT_TRANSMART,
OPT_LANGS_TENCENT,
OPT_LANGS_SPECIAL,
OPT_LANGS_MICROSOFT,
} from "../config";
import { sha256 } from "../libs/utils";
import interpreter from "../libs/interpreter";
import { msAuth } from "../libs/auth";
import { kissLog } from "../libs/log";
import { genTransReq, parseTransRes } from "./trans";
import { getHttpCachePolyfill, putHttpCachePolyfill } from "../libs/cache";
/**
* Sync data
@@ -85,14 +55,19 @@ export const apiGoogleLangdetect = async (text) => {
q: text,
};
const input = `${URL_GOOGLE_TRAN}?${queryString.stringify(params)}`;
const res = await fetchData(input, {
const init = {
headers: {
"Content-type": "application/json",
},
useCache: true,
});
};
const res = await fetchData(input, init, { useCache: true });
if (res?.src) {
await putHttpCachePolyfill(input, init, res);
return res.src;
}
return "";
};
/**
@@ -102,17 +77,26 @@ export const apiGoogleLangdetect = async (text) => {
*/
export const apiMicrosoftLangdetect = async (text) => {
const [token] = await msAuth();
const res = await fetchData(URL_MICROSOFT_LANGDETECT, {
const input =
"https://api-edge.cognitive.microsofttranslator.com/detect?api-version=3.0";
const init = {
headers: {
"Content-type": "application/json",
Authorization: `Bearer ${token}`,
},
method: "POST",
body: JSON.stringify([{ Text: text }]),
};
const res = await fetchData(input, init, {
useCache: true,
});
if (res[0].language) {
await putHttpCachePolyfill(input, init, res);
return OPT_LANGS_MICROSOFT.get(res[0].language) ?? res[0].language;
}
return "";
};
/**
@@ -121,7 +105,8 @@ export const apiMicrosoftLangdetect = async (text) => {
* @returns
*/
export const apiBaiduLangdetect = async (text) => {
const res = await fetchData(URL_BAIDU_LANGDETECT, {
const input = "https://fanyi.baidu.com/langdetect";
const init = {
headers: {
"Content-type": "application/json",
},
@@ -129,10 +114,11 @@ export const apiBaiduLangdetect = async (text) => {
body: JSON.stringify({
query: text,
}),
useCache: true,
});
};
const res = await fetchData(input, init, { useCache: true });
if (res.error === 0) {
await putHttpCachePolyfill(input, init, res);
return OPT_LANGS_BAIDU.get(res.lan) ?? res.lan;
}
@@ -145,7 +131,8 @@ export const apiBaiduLangdetect = async (text) => {
* @returns
*/
export const apiBaiduSuggest = async (text) => {
const res = await fetchData(URL_BAIDU_SUGGEST, {
const input = "https://fanyi.baidu.com/sug";
const init = {
headers: {
"Content-type": "application/json",
},
@@ -153,10 +140,11 @@ export const apiBaiduSuggest = async (text) => {
body: JSON.stringify({
kw: text,
}),
useCache: true,
});
};
const res = await fetchData(input, init, { useCache: true });
if (res.errno === 0) {
await putHttpCachePolyfill(input, init, res);
return res.data;
}
@@ -171,10 +159,8 @@ export const apiBaiduSuggest = async (text) => {
* @returns
*/
export const apiBaiduTTS = (text, lan = "uk", spd = 3) => {
const url = `${URL_BAIDU_TTS}?${queryString.stringify({ lan, text, spd })}`;
return fetchData(url, {
useCache: false, // Disable caching of audio data to keep the cache from growing too fast
});
const input = `https://fanyi.baidu.com/gettts?${queryString.stringify({ lan, text, spd })}`;
return fetchData(input);
};
/**
@@ -183,23 +169,28 @@ export const apiBaiduTTS = (text, lan = "uk", spd = 3) => {
* @returns
*/
export const apiTencentLangdetect = async (text) => {
const input = "https://transmart.qq.com/api/imt";
const body = JSON.stringify({
header: {
fn: "text_analysis",
},
text,
});
const res = await fetchData(URL_TENCENT_TRANSMART, {
const init = {
headers: {
"Content-type": "application/json",
},
method: "POST",
body,
useCache: true,
});
};
const res = await fetchData(input, init, { useCache: true });
if (res.language) {
await putHttpCachePolyfill(input, init, res);
return OPT_LANGS_TENCENT.get(res.language) ?? res.language;
}
return "";
};
/**
@@ -216,11 +207,12 @@ export const apiTranslate = async ({
useCache = true,
usePool = true,
}) => {
let trText = "";
let isSame = false;
let cacheInput; // cache URL
let resCache; // cached object
let res; // JSON data returned by the translation API
if (!text) {
return [trText, true];
return ["", false];
}
const from =
@@ -229,139 +221,59 @@ export const apiTranslate = async ({
const to = OPT_LANGS_SPECIAL[translator].get(toLang);
if (!to) {
kissLog(`target lang: ${toLang} not support`, "translate");
return [trText, isSame];
return ["", false];
}
// A major/minor version bump invalidates old cache entries
// Look up cached data
// TODO: refine the factors that invalidate the cache
if (useCache) {
const [v1, v2] = process.env.REACT_APP_VERSION.split(".");
const cacheOpts = {
translator,
text,
fromLang,
toLang,
userPrompt: apiSetting.userPrompt, // changing the prompt invalidates the cache
model: apiSetting.model, // changing the model invalidates the cache
version: [v1, v2].join("."),
};
cacheInput = `${URL_CACHE_TRAN}?${queryString.stringify(cacheOpts)}`;
resCache = await getHttpCachePolyfill(cacheInput);
}
const transOpts = {
translator,
// Request data from the API
if (!resCache) {
const [input, init] = await genTransReq(translator, {
text,
from,
to,
};
const res = await fetchData(
`${URL_CACHE_TRAN}?${queryString.stringify(cacheOpts)}`,
{
useCache,
...apiSetting,
});
res = await fetchData(input, init, {
useCache: false,
usePool,
transOpts,
apiSetting,
fetchInterval: apiSetting.fetchInterval,
fetchLimit: apiSetting.fetchLimit,
httpTimeout: apiSetting.httpTimeout,
});
} else {
res = resCache;
}
);
switch (translator) {
case OPT_TRANS_GOOGLE:
trText = res.sentences.map((item) => item.trans).join(" ");
isSame = to === res.src;
break;
case OPT_TRANS_GOOGLE_2:
trText = res?.[0]?.[0] || "";
isSame = to === res.src;
break;
case OPT_TRANS_MICROSOFT:
trText = res
.map((item) => item.translations.map((item) => item.text).join(" "))
.join(" ");
isSame = text === trText;
break;
case OPT_TRANS_DEEPL:
trText = res.translations.map((item) => item.text).join(" ");
isSame = to === res.translations[0].detected_source_language;
break;
case OPT_TRANS_DEEPLFREE:
trText = res.result?.texts.map((item) => item.text).join(" ");
isSame = to === res.result?.lang;
break;
case OPT_TRANS_DEEPLX:
trText = res.data;
isSame = to === res.source_lang;
break;
case OPT_TRANS_NIUTRANS:
const json = JSON.parse(res);
if (json.error_msg) {
throw new Error(json.error_msg);
if (!res) {
return ["", false];
}
trText = json.tgt_text;
isSame = to === json.from;
break;
case OPT_TRANS_BAIDU:
// trText = res.trans_result?.data.map((item) => item.dst).join(" ");
// isSame = res.trans_result?.to === res.trans_result?.from;
if (res.type === 1) {
trText = Object.keys(JSON.parse(res.result).content[0].mean[0].cont)[0];
isSame = to === res.from;
} else if (res.type === 2) {
trText = res.data.map((item) => item.dst).join(" ");
isSame = to === res.from;
}
break;
case OPT_TRANS_TENCENT:
trText = res?.auto_translation?.[0];
isSame = text === trText;
break;
case OPT_TRANS_VOLCENGINE:
trText = res?.translation || "";
isSame = to === res?.detected_language;
break;
case OPT_TRANS_OPENAI:
case OPT_TRANS_OPENAI_2:
case OPT_TRANS_OPENAI_3:
case OPT_TRANS_GEMINI_2:
case OPT_TRANS_OPENROUTER:
trText = res?.choices?.map((item) => item.message.content).join(" ");
isSame = text === trText;
break;
case OPT_TRANS_GEMINI:
trText = res?.candidates
?.map((item) => item.content?.parts.map((item) => item.text).join(" "))
.join(" ");
isSame = text === trText;
break;
case OPT_TRANS_CLAUDE:
trText = res?.content?.map((item) => item.text).join(" ");
isSame = text === trText;
break;
case OPT_TRANS_CLOUDFLAREAI:
trText = res?.result?.translated_text;
isSame = text === trText;
break;
case OPT_TRANS_OLLAMA:
case OPT_TRANS_OLLAMA_2:
case OPT_TRANS_OLLAMA_3:
const { thinkIgnore = "" } = apiSetting;
const deepModels = thinkIgnore.split(",").filter((model) => model.trim());
if (deepModels.some((model) => res?.model?.startsWith(model))) {
trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, "");
} else {
trText = res?.response;
}
isSame = text === trText;
break;
case OPT_TRANS_CUSTOMIZE:
case OPT_TRANS_CUSTOMIZE_2:
case OPT_TRANS_CUSTOMIZE_3:
case OPT_TRANS_CUSTOMIZE_4:
case OPT_TRANS_CUSTOMIZE_5:
const { resHook } = apiSetting;
if (resHook?.trim()) {
interpreter.run(`exports.resHook = ${resHook}`);
[trText, isSame] = interpreter.exports.resHook(res, text, from, to);
} else {
trText = res.text;
isSame = to === res.from;
}
break;
default:
// Parse the returned data
const [trText, isSame] = parseTransRes(translator, res, apiSetting, {
text,
from,
to,
});
// Insert into the cache
if (useCache && !resCache && trText) {
await putHttpCachePolyfill(cacheInput, null, res);
}
return [trText, isSame, res];
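The translate path now caches by a synthetic URL rather than by the provider request, so a prompt, model, or app-version change naturally misses the cache. A condensed sketch of that flow, using the imports from this file; translateWithCache is a hypothetical wrapper for illustration, not part of the commit:

import queryString from "query-string";
import { URL_CACHE_TRAN } from "../config";
import { fetchData } from "../libs/fetch";
import { getHttpCachePolyfill, putHttpCachePolyfill } from "../libs/cache";
import { genTransReq, parseTransRes } from "./trans";

// Hypothetical wrapper condensing apiTranslate's cache handling.
export async function translateWithCache(translator, { text, fromLang, toLang, from, to }, apiSetting) {
  // The cache key is a synthetic URL: a different prompt, model, or
  // major/minor version yields a new key, i.e. a cache miss.
  const [v1, v2] = process.env.REACT_APP_VERSION.split(".");
  const cacheInput = `${URL_CACHE_TRAN}?${queryString.stringify({
    translator,
    text,
    fromLang,
    toLang,
    userPrompt: apiSetting.userPrompt,
    model: apiSetting.model,
    version: [v1, v2].join("."),
  })}`;
  const resCache = await getHttpCachePolyfill(cacheInput);
  let res = resCache;
  if (!res) {
    // Cache miss: build the provider request and send it through the pool,
    // with per-API pacing and without HTTP-level caching.
    const [input, init] = await genTransReq(translator, { text, from, to, ...apiSetting });
    res = await fetchData(input, init, {
      useCache: false,
      usePool: true,
      fetchInterval: apiSetting.fetchInterval,
      fetchLimit: apiSetting.fetchLimit,
      httpTimeout: apiSetting.httpTimeout,
    });
  }
  const [trText, isSame] = parseTransRes(translator, res, apiSetting, { text, from, to });
  if (!resCache && trText) {
    await putHttpCachePolyfill(cacheInput, null, res); // only cache non-empty results
  }
  return [trText, isSame, res];
}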

View File

@@ -629,9 +629,7 @@ const genCustom = ({ text, from, to, url, key, reqHook }) => {
* @param {*}
* @returns
*/
export const genTransReq = ({ translator, text, from, to }, apiSetting) => {
const args = { text, from, to, ...apiSetting };
export const genTransReq = (translator, args) => {
switch (translator) {
case OPT_TRANS_DEEPL:
case OPT_TRANS_OPENAI:
@@ -708,3 +706,128 @@ export const genTransReq = ({ translator, text, from, to }, apiSetting) => {
throw new Error(`[trans] translator: ${translator} not support`);
}
};
/**
* Parse the data returned by the translation API
* @param {*} translator
* @param {*} res
* @param {*} apiSetting
* @param {*} param3
* @returns
*/
export const parseTransRes = (
translator,
res,
apiSetting,
{ text, from, to }
) => {
let trText = ""; // 返回的译文
let isSame = false; // 译文与原文语言是否相同
switch (translator) {
case OPT_TRANS_GOOGLE:
trText = res.sentences.map((item) => item.trans).join(" ");
isSame = to === res.src;
break;
case OPT_TRANS_GOOGLE_2:
trText = res?.[0]?.[0] || "";
isSame = to === res.src;
break;
case OPT_TRANS_MICROSOFT:
trText = res
.map((item) => item.translations.map((item) => item.text).join(" "))
.join(" ");
isSame = text === trText;
break;
case OPT_TRANS_DEEPL:
trText = res.translations.map((item) => item.text).join(" ");
isSame = to === res.translations[0].detected_source_language;
break;
case OPT_TRANS_DEEPLFREE:
trText = res.result?.texts.map((item) => item.text).join(" ");
isSame = to === res.result?.lang;
break;
case OPT_TRANS_DEEPLX:
trText = res.data;
isSame = to === res.source_lang;
break;
case OPT_TRANS_NIUTRANS:
const json = JSON.parse(res);
if (json.error_msg) {
throw new Error(json.error_msg);
}
trText = json.tgt_text;
isSame = to === json.from;
break;
case OPT_TRANS_BAIDU:
// trText = res.trans_result?.data.map((item) => item.dst).join(" ");
// isSame = res.trans_result?.to === res.trans_result?.from;
if (res.type === 1) {
trText = Object.keys(JSON.parse(res.result).content[0].mean[0].cont)[0];
isSame = to === res.from;
} else if (res.type === 2) {
trText = res.data.map((item) => item.dst).join(" ");
isSame = to === res.from;
}
break;
case OPT_TRANS_TENCENT:
trText = res?.auto_translation?.[0];
isSame = text === trText;
break;
case OPT_TRANS_VOLCENGINE:
trText = res?.translation || "";
isSame = to === res?.detected_language;
break;
case OPT_TRANS_OPENAI:
case OPT_TRANS_OPENAI_2:
case OPT_TRANS_OPENAI_3:
case OPT_TRANS_GEMINI_2:
case OPT_TRANS_OPENROUTER:
trText = res?.choices?.map((item) => item.message.content).join(" ");
isSame = text === trText;
break;
case OPT_TRANS_GEMINI:
trText = res?.candidates
?.map((item) => item.content?.parts.map((item) => item.text).join(" "))
.join(" ");
isSame = text === trText;
break;
case OPT_TRANS_CLAUDE:
trText = res?.content?.map((item) => item.text).join(" ");
isSame = text === trText;
break;
case OPT_TRANS_CLOUDFLAREAI:
trText = res?.result?.translated_text;
isSame = text === trText;
break;
case OPT_TRANS_OLLAMA:
case OPT_TRANS_OLLAMA_2:
case OPT_TRANS_OLLAMA_3:
const { thinkIgnore = "" } = apiSetting;
const deepModels = thinkIgnore.split(",").filter((model) => model.trim());
if (deepModels.some((model) => res?.model?.startsWith(model))) {
trText = res?.response.replace(/<think>[\s\S]*<\/think>/i, "");
} else {
trText = res?.response;
}
isSame = text === trText;
break;
case OPT_TRANS_CUSTOMIZE:
case OPT_TRANS_CUSTOMIZE_2:
case OPT_TRANS_CUSTOMIZE_3:
case OPT_TRANS_CUSTOMIZE_4:
case OPT_TRANS_CUSTOMIZE_5:
const { resHook } = apiSetting;
if (resHook?.trim()) {
interpreter.run(`exports.resHook = ${resHook}`);
[trText, isSame] = interpreter.exports.resHook(res, text, from, to);
} else {
trText = res.text;
isSame = to === res.from;
}
break;
default:
}
return [trText, isSame];
};

View File

@@ -2,6 +2,7 @@ import browser from "webextension-polyfill";
import {
MSG_FETCH,
MSG_GET_HTTPCACHE,
MSG_PUT_HTTPCACHE,
MSG_TRANS_TOGGLE,
MSG_OPEN_OPTIONS,
MSG_SAVE_RULE,
@@ -21,7 +22,8 @@ import {
} from "./config";
import { getSettingWithDefault, tryInitDefaultData } from "./libs/storage";
import { trySyncSettingAndRules } from "./libs/sync";
import { fetchHandle, getHttpCache } from "./libs/fetch";
import { fetchHandle } from "./libs/fetch";
import { getHttpCache, putHttpCache } from "./libs/cache";
import { sendTabMsg } from "./libs/msg";
import { trySyncAllSubRules } from "./libs/subRules";
import { tryClearCaches } from "./libs";
@@ -190,8 +192,9 @@ browser.runtime.onMessage.addListener(async ({ action, args }) => {
case MSG_FETCH:
return await fetchHandle(args);
case MSG_GET_HTTPCACHE:
const { input, init } = args;
return await getHttpCache(input, init);
return await getHttpCache(args.input, args.init);
case MSG_PUT_HTTPCACHE:
return await putHttpCache(args.input, args.init, args.data);
case MSG_OPEN_OPTIONS:
return await browser.runtime.openOptionsPage();
case MSG_SAVE_RULE:
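In the extension build, pages other than the background cannot reach its CacheStorage directly, so the new MSG_PUT_HTTPCACHE message mirrors MSG_GET_HTTPCACHE. A sketch of the round trip, with placeholder values (sendBgMsg and the message names come from this diff):

import { MSG_PUT_HTTPCACHE } from "./config";
import { sendBgMsg } from "./libs/msg";

// Content-script side: ask the background to store an already-parsed response
// under the given request key.
await sendBgMsg(MSG_PUT_HTTPCACHE, {
  input: "https://example.com/detect", // placeholder
  init: { method: "GET" },
  data: { language: "en" }, // placeholder payload
});
// Background side: the listener above forwards the same fields to
// putHttpCache(args.input, args.init, args.data), which writes to CacheStorage.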

View File

@@ -703,9 +703,9 @@ export const I18N = {
zh_TW: `OpenAI 提示詞`,
},
if_clear_cache: {
zh: `是否清除缓存`,
en: `Whether clear cache`,
zh_TW: `是否清除快取`,
zh: `是否清除缓存默认缓存7天`,
en: `Whether clear cache (Default cache is 7 days)`,
zh_TW: `是否清除快取預設快取7天`,
},
clear_cache_never: {
zh: `不清除缓存`,

View File

@@ -58,6 +58,7 @@ export const CACHE_NAME = `${APP_NAME}_cache`;
export const MSG_FETCH = "fetch";
export const MSG_GET_HTTPCACHE = "get_httpcache";
export const MSG_PUT_HTTPCACHE = "put_httpcache";
export const MSG_OPEN_OPTIONS = "open_options";
export const MSG_SAVE_RULE = "save_rule";
export const MSG_TRANS_TOGGLE = "trans_toggle";
@@ -551,6 +552,7 @@ export const DEFAULT_SUBRULES_LIST = [
];
export const DEFAULT_HTTP_TIMEOUT = 5000; // request timeout
export const DEFAULT_CACHE_TIMEOUT = 3600 * 24 * 7; // cache expiry (7 days)
// Translation APIs
const defaultCustomApi = {

View File

@@ -1,6 +1,6 @@
import { getMsauth, setMsauth } from "./storage";
import { URL_MICROSOFT_AUTH } from "../config";
import { fetchHandle } from "./fetch";
import { fetchData } from "./fetch";
import { kissLog } from "./log";
const parseMSToken = (token) => {
@@ -35,7 +35,7 @@ const _msAuth = () => {
}
// Cache missing or expired; query the API
token = await fetchHandle({ input: URL_MICROSOFT_AUTH });
token = await fetchData(URL_MICROSOFT_AUTH);
exp = parseMSToken(token);
await setMsauth({ token, exp });
return [token, exp];

src/libs/cache.js (new file, 137 lines)
View File

@@ -0,0 +1,137 @@
import {
CACHE_NAME,
DEFAULT_CACHE_TIMEOUT,
MSG_GET_HTTPCACHE,
MSG_PUT_HTTPCACHE,
} from "../config";
import { kissLog } from "./log";
import { isExt } from "./client";
import { isBg } from "./browser";
import { sendBgMsg } from "./msg";
import { blobToBase64 } from "./utils";
/**
* Build a cache request
* @param {*} input
* @param {*} init
* @returns
*/
const newCacheReq = async (input, init) => {
let request = new Request(input, init);
if (request.method !== "GET") {
const body = await request.text();
const cacheUrl = new URL(request.url);
cacheUrl.pathname += body;
request = new Request(cacheUrl.toString(), { method: "GET" });
}
return request;
};
/**
* Query caches
* @param {*} input
* @param {*} init
* @returns
*/
export const getHttpCache = async (input, init) => {
try {
const req = await newCacheReq(input, init);
const cache = await caches.open(CACHE_NAME);
const res = await cache.match(req);
if (res) {
return await parseResponse(res);
}
} catch (err) {
kissLog(err, "get cache");
}
return null;
};
/**
* Put into caches
* @param {*} input
* @param {*} init
* @param {*} data
*/
export const putHttpCache = async (input, init, data) => {
try {
const req = await newCacheReq(input, init);
const cache = await caches.open(CACHE_NAME);
const res = new Response(JSON.stringify(data), {
status: 200,
headers: {
"Content-Type": "application/json",
"Cache-Control": `max-age=${DEFAULT_CACHE_TIMEOUT}`,
},
});
// res.headers.set("Cache-Control", `max-age=${DEFAULT_CACHE_TIMEOUT}`);
await cache.put(req, res);
} catch (err) {
kissLog(err, "put cache");
}
};
/**
* Parse the response
* @param {*} res
* @returns
*/
export const parseResponse = async (res) => {
if (!res) {
throw new Error("Response object does not exist");
}
if (!res.ok) {
const msg = {
url: res.url,
status: res.status,
};
if (res.headers.get("Content-Type")?.includes("json")) {
msg.response = await res.json();
}
throw new Error(JSON.stringify(msg));
}
const contentType = res.headers.get("Content-Type");
if (contentType?.includes("json")) {
return res.json();
} else if (contentType?.includes("audio")) {
const blob = await res.blob();
return blobToBase64(blob);
}
return res.text();
};
/**
* Compatibility wrapper for getHttpCache
* @param {*} input
* @param {*} init
* @returns
*/
export const getHttpCachePolyfill = (input, init) => {
// Extension
if (isExt && !isBg()) {
return sendBgMsg(MSG_GET_HTTPCACHE, { input, init });
}
// Userscript / web page / background page
return getHttpCache(input, init);
};
/**
* Compatibility wrapper for putHttpCache
* @param {*} input
* @param {*} init
* @param {*} data
* @returns
*/
export const putHttpCachePolyfill = (input, init, data) => {
// Extension
if (isExt && !isBg()) {
return sendBgMsg(MSG_PUT_HTTPCACHE, { input, init, data });
}
// Userscript / web page / background page
return putHttpCache(input, init, data);
};
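CacheStorage only matches GET requests, so newCacheReq folds a non-GET body into the URL path to form a stable key, and the polyfills store and return already-parsed data. A minimal round-trip sketch under those assumptions (URL and payload are placeholders):

import { getHttpCachePolyfill, putHttpCachePolyfill } from "./libs/cache";

const input = "https://example.com/detect"; // placeholder
const init = {
  method: "POST",
  headers: { "Content-type": "application/json" },
  body: JSON.stringify({ q: "hi" }),
};

// Stores the parsed object as a JSON Response tagged with
// Cache-Control: max-age=DEFAULT_CACHE_TIMEOUT (7 days).
await putHttpCachePolyfill(input, init, { language: "en" });

// The same input/init maps to the same synthetic GET key, so this returns
// { language: "en" }; a different body would be a miss.
const hit = await getHttpCachePolyfill(input, init);

Folding the body into the pathname keeps distinct POST payloads from colliding while still satisfying the GET-only constraint of cache.put and cache.match.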

View File

@@ -1,38 +1,11 @@
import { isExt, isGm } from "./client";
import { sendBgMsg } from "./msg";
import { taskPool } from "./pool";
import { getSettingWithDefault } from "./storage";
import {
MSG_FETCH,
MSG_GET_HTTPCACHE,
CACHE_NAME,
DEFAULT_FETCH_INTERVAL,
DEFAULT_FETCH_LIMIT,
DEFAULT_HTTP_TIMEOUT,
} from "../config";
import { MSG_FETCH, DEFAULT_HTTP_TIMEOUT } from "../config";
import { isBg } from "./browser";
import { genTransReq } from "../apis/trans";
import { kissLog } from "./log";
import { blobToBase64 } from "./utils";
/**
* Build a cache request
* @param {*} input
* @param {*} init
* @returns
*/
const newCacheReq = async (input, init) => {
let request = new Request(input, init);
if (request.method !== "GET") {
const body = await request.text();
const cacheUrl = new URL(request.url);
cacheUrl.pathname += body;
request = new Request(cacheUrl.toString(), { method: "GET" });
}
return request;
};
import { getFetchPool } from "./pool";
import { getHttpCachePolyfill, parseResponse } from "./cache";
/**
* Request wrapper for userscripts
@@ -73,56 +46,25 @@ export const fetchGM = async (
/**
* Send a request
* @param {*} param0
* @param {*} input
* @param {*} init
* @param {*} opts
* @returns
*/
export const fetchPatcher = async (input, init, transOpts, apiSetting) => {
if (transOpts?.translator) {
[input, init] = await genTransReq(transOpts, apiSetting);
}
if (!input) {
throw new Error("url is empty");
}
let timeout = apiSetting?.httpTimeout || DEFAULT_HTTP_TIMEOUT;
if (!apiSetting) {
export const fetchPatcher = async (input, init = {}, opts) => {
let timeout = opts?.httpTimeout;
if (!timeout) {
try {
timeout = (await getSettingWithDefault()).httpTimeout;
} catch (err) {
//
kissLog(err, "getSettingWithDefault");
}
}
if (!timeout) {
timeout = DEFAULT_HTTP_TIMEOUT;
}
if (isGm) {
// let info;
// if (window.KISS_GM) {
// info = await window.KISS_GM.getInfo();
// } else {
// info = GM.info;
// }
// Tampermonkey --> .connects
// Violentmonkey --> .connect
// const connects = info?.script?.connects || info?.script?.connect || [];
// const url = new URL(input);
// const isSafe = connects.find((item) => url.hostname.endsWith(item));
// if (isSafe) {
// // todo: a custom API's init may already contain a signal
// Object.assign(init, { timeout });
// const { body, headers, status, statusText } = window.KISS_GM
// ? await window.KISS_GM.fetch(input, init)
// : await fetchGM(input, init);
// return new Response(body, {
// headers: new Headers(headers),
// status,
// statusText,
// });
// }
// todo: a custom API's init may already contain a signal
Object.assign(init, { timeout });
@@ -144,92 +86,13 @@ export const fetchPatcher = async (input, init, transOpts, apiSetting) => {
return fetch(input, init);
};
/**
* Parse the response
* @param {*} res
* @returns
*/
const parseResponse = async (res) => {
if (!res) {
return null;
}
const contentType = res.headers.get("Content-Type");
if (contentType?.includes("json")) {
return await res.json();
} else if (contentType?.includes("audio")) {
const blob = await res.blob();
return await blobToBase64(blob);
}
return await res.text();
};
/**
* Query caches
* @param {*} input
* @param {*} param1
* @returns
*/
export const getHttpCache = async (input, { method, headers, body }) => {
try {
const req = await newCacheReq(input, { method, headers, body });
const cache = await caches.open(CACHE_NAME);
const res = await cache.match(req);
return parseResponse(res);
} catch (err) {
kissLog(err, "get cache");
}
return null;
};
/**
* Put into caches
* @param {*} input
* @param {*} param1
* @param {*} res
*/
export const putHttpCache = async (input, { method, headers, body }, res) => {
try {
const req = await newCacheReq(input, { method, headers, body });
const cache = await caches.open(CACHE_NAME);
await cache.put(req, res);
} catch (err) {
kissLog(err, "put cache");
}
};
/**
* Handle a request
* @param {*} param0
* @returns
*/
export const fetchHandle = async ({
input,
useCache,
transOpts,
apiSetting,
...init
}) => {
// Send the request
const res = await fetchPatcher(input, init, transOpts, apiSetting);
if (!res) {
throw new Error("Unknow error");
} else if (!res.ok) {
const msg = {
url: res.url,
status: res.status,
};
if (res.headers.get("Content-Type")?.includes("json")) {
msg.response = await res.json();
}
throw new Error(JSON.stringify(msg));
}
// Insert into the cache
if (useCache) {
await putHttpCache(input, init, res.clone());
}
export const fetchHandle = async ({ input, init, opts }) => {
const res = await fetchPatcher(input, init, opts);
return parseResponse(res);
};
@@ -238,7 +101,7 @@ export const fetchHandle = async ({
* @param {*} args
* @returns
*/
export const fetchPolyfill = (args) => {
const fetchPolyfill = (args) => {
// 插件
if (isExt && !isBg()) {
return sendBgMsg(MSG_FETCH, args);
@@ -248,72 +111,36 @@ export const fetchPolyfill = (args) => {
return fetchHandle(args);
};
/**
* Compatibility wrapper for getHttpCache
* @param {*} input
* @param {*} init
* @returns
*/
export const getHttpCachePolyfill = (input, init) => {
// Extension
if (isExt && !isBg()) {
return sendBgMsg(MSG_GET_HTTPCACHE, { input, init });
}
// Userscript / web page / background page
return getHttpCache(input, init);
};
/**
* Request pool instance
*/
export const fetchPool = taskPool(
fetchPolyfill,
null,
DEFAULT_FETCH_INTERVAL,
DEFAULT_FETCH_LIMIT
);
/**
* Fetch data
* @param {*} input
* @param {*} init
* @param {*} param1
* @returns
*/
export const fetchData = async (input, { useCache, usePool, ...args } = {}) => {
export const fetchData = async (
input,
init,
{ useCache, usePool, fetchInterval, fetchLimit, ...opts } = {}
) => {
if (!input?.trim()) {
throw new Error("URL is empty");
}
// Look up the cache
// Use cached data
if (useCache) {
const cache = await getHttpCachePolyfill(input, args);
if (cache) {
return cache;
const resCache = await getHttpCachePolyfill(input, init);
if (resCache) {
return resCache;
}
}
// Send the request through the task pool
if (usePool) {
return fetchPool.push({ input, useCache, ...args });
const fetchPool = getFetchPool(fetchInterval, fetchLimit);
return fetchPool.push(fetchPolyfill, { input, init, opts });
}
// Request directly
return fetchPolyfill({ input, useCache, ...args });
};
/**
* Update fetch pool parameters
* @param {*} interval
* @param {*} limit
*/
export const updateFetchPool = (interval, limit) => {
fetchPool.update(interval, limit);
};
/**
* Clear the task pool
*/
export const clearFetchPool = () => {
fetchPool.clear();
return fetchPolyfill({ input, init, opts });
};
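With the cache write removed from fetchHandle, useCache is now read-only at this layer, and per-API pacing travels with each call instead of being pushed into a global pool setting. A sketch of a pooled call with placeholder limits:

import { fetchData } from "./libs/fetch";

const res = await fetchData(
  "https://example.com/api", // placeholder URL
  { headers: { "Content-type": "application/json" } },
  {
    useCache: true, // read-only here: return a cache hit if one exists
    usePool: true, // queue through the shared fetch pool
    fetchInterval: 500, // placeholder pacing in ms
    fetchLimit: 1, // placeholder concurrency limit
    httpTimeout: 8000, // placeholder; falls back to settings, then DEFAULT_HTTP_TIMEOUT
  }
);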

View File

@@ -1,3 +1,4 @@
import { DEFAULT_FETCH_INTERVAL, DEFAULT_FETCH_LIMIT } from "../config";
import { kissLog } from "./log";
/**
@@ -8,13 +9,7 @@ import { kissLog } from "./log";
* @param {*} _limit
* @returns
*/
export const taskPool = (
fn,
preFn,
_interval = 100,
_limit = 100,
_retryInteral = 1000
) => {
const taskPool = (_interval = 100, _limit = 100, _retryInteral = 1000) => {
const pool = [];
const maxRetry = 2; // maximum number of retries
let maxCount = _limit; // maximum number of concurrent tasks
@@ -31,10 +26,9 @@ export const taskPool = (
const item = pool.shift();
if (item) {
curCount++;
const { args, resolve, reject, retry } = item;
const { fn, args, resolve, reject, retry } = item;
try {
const preArgs = preFn ? await preFn(item.args) : {};
const res = await fn({ ...args, ...preArgs });
const res = await fn(args);
resolve(res);
} catch (err) {
kissLog(err, "task");
@@ -54,12 +48,12 @@ export const taskPool = (
};
return {
push: async (args) => {
push: async (fn, args) => {
if (!timer) {
run();
}
return new Promise((resolve, reject) => {
pool.push({ args, resolve, reject, retry: 0 });
pool.push({ fn, args, resolve, reject, retry: 0 });
});
},
update: (_interval = 100, _limit = 100) => {
@@ -78,3 +72,40 @@ export const taskPool = (
},
};
};
/**
* Request pool instance
*/
let fetchPool;
/**
* Get the request pool instance
*/
export const getFetchPool = (interval, limit) => {
if (!fetchPool) {
fetchPool = taskPool(
interval ?? DEFAULT_FETCH_INTERVAL,
limit ?? DEFAULT_FETCH_LIMIT
);
} else if (interval && limit) {
updateFetchPool(interval, limit);
}
return fetchPool;
};
/**
* Update request pool parameters
* @param {*} interval
* @param {*} limit
*/
export const updateFetchPool = (interval, limit) => {
fetchPool && fetchPool.update(interval, limit);
};
/**
* Clear the request pool
*/
export const clearFetchPool = () => {
fetchPool && fetchPool.clear();
};
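taskPool no longer binds fn and preFn at construction; each queued item carries its own function, and the fetch pool becomes a lazily created singleton owned by this module. A sketch of the new usage, with placeholder values:

import { getFetchPool } from "./libs/pool";

// The first call creates the singleton with the given pacing; later calls
// that pass both values update it through updateFetchPool.
const pool = getFetchPool(500, 2); // placeholder interval (ms) and limit

// push() now takes the task function together with its arguments, so one
// pool can serve different kinds of tasks.
const status = await pool.push(
  async ({ input }) => (await fetch(input)).status, // hypothetical task fn
  { input: "https://example.com" } // placeholder args
);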

View File

@@ -13,11 +13,9 @@ import {
OPT_TIMING_PAGEOPEN,
OPT_TIMING_MOUSEOVER,
DEFAULT_TRANS_APIS,
DEFAULT_FETCH_LIMIT,
DEFAULT_FETCH_INTERVAL,
} from "../config";
import Content from "../views/Content";
import { updateFetchPool, clearFetchPool } from "./fetch";
import { clearFetchPool } from "./pool";
import { debounce, genEventName, getHtmlText } from "./utils";
import { runFixer } from "./webfix";
import { apiTranslate } from "../apis";
@@ -107,18 +105,6 @@ export class Translator {
};
};
_updatePool(translator) {
if (!translator) {
return;
}
const {
fetchInterval = DEFAULT_FETCH_INTERVAL,
fetchLimit = DEFAULT_FETCH_LIMIT,
} = this._setting.transApis[translator] || {};
updateFetchPool(fetchInterval, fetchLimit);
}
constructor(rule, setting) {
this._overrideAttachShadow();
@@ -131,8 +117,6 @@ export class Translator {
.map((item) => item.split(",").map((item) => item.trim()))
.filter(([term]) => Boolean(term));
this._updatePool(rule.translator);
if (rule.transOpen === "true") {
this._register();
}
@@ -169,7 +153,6 @@ export class Translator {
updateRule = (obj) => {
this.rule = { ...this.rule, ...obj };
this._updatePool(obj.translator);
};
toggle = () => {