diff --git a/src/apis/trans.js b/src/apis/trans.js
index d8128b1..1e58a34 100644
--- a/src/apis/trans.js
+++ b/src/apis/trans.js
@@ -22,12 +22,14 @@ import {
API_SPE_TYPES,
INPUT_PLACE_FROM,
INPUT_PLACE_TO,
- // INPUT_PLACE_TEXT,
+ INPUT_PLACE_TEXT,
INPUT_PLACE_KEY,
INPUT_PLACE_MODEL,
DEFAULT_USER_AGENT,
defaultSystemPrompt,
defaultSubtitlePrompt,
+ defaultNobatchPrompt,
+ defaultNobatchUserPrompt,
} from "../config";
import { msAuth } from "../libs/auth";
import { genDeeplFree } from "./deepl";
@@ -66,38 +68,41 @@ const genSystemPrompt = ({ systemPrompt, from, to }) =>
.replaceAll(INPUT_PLACE_TO, to);
const genUserPrompt = ({
- // userPrompt,
+ nobatchUserPrompt,
+ useBatchFetch,
tone,
glossary = {},
- // from,
+ from,
to,
texts,
docInfo,
}) => {
- const prompt = JSON.stringify({
- targetLanguage: to,
- title: docInfo.title,
- description: docInfo.description,
- segments: texts.map((text, i) => ({ id: i, text })),
- glossary,
- tone,
- });
+ if (useBatchFetch) {
+ return JSON.stringify({
+ targetLanguage: to,
+ title: docInfo.title,
+ description: docInfo.description,
+ segments: texts.map((text, i) => ({ id: i, text })),
+ glossary,
+ tone,
+ });
+ }
- // if (userPrompt.includes(INPUT_PLACE_TEXT)) {
- // return userPrompt
- // .replaceAll(INPUT_PLACE_FROM, from)
- // .replaceAll(INPUT_PLACE_TO, to)
- // .replaceAll(INPUT_PLACE_TEXT, prompt);
- // }
-
- return prompt;
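+  // Non-batch mode: fill the plain single-segment prompt with the first (and only) source text.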
+ return nobatchUserPrompt
+ .replaceAll(INPUT_PLACE_FROM, from)
+ .replaceAll(INPUT_PLACE_TO, to)
+ .replaceAll(INPUT_PLACE_TEXT, texts[0]);
};
-const parseAIRes = (raw) => {
+const parseAIRes = (raw, useBatchFetch = true) => {
if (!raw) {
return [];
}
+ if (!useBatchFetch) {
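+    // Non-batch mode: the model replies with the bare translated string, so wrap it in the [[text]] shape callers expect.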
+ return [[raw]];
+ }
+
try {
const jsonString = extractJson(raw);
if (!jsonString) return [];
@@ -627,7 +632,10 @@ export const genTransReq = async ({ reqHook, ...args }) => {
apiSlug,
key,
systemPrompt,
- userPrompt,
+ // userPrompt,
+ nobatchPrompt = defaultNobatchPrompt,
+ nobatchUserPrompt = defaultNobatchUserPrompt,
+ useBatchFetch,
from,
to,
texts,
@@ -647,11 +655,16 @@ export const genTransReq = async ({ reqHook, ...args }) => {
}
if (API_SPE_TYPES.ai.has(apiType)) {
- args.systemPrompt = genSystemPrompt({ systemPrompt, from, to });
+ args.systemPrompt = genSystemPrompt({
+ systemPrompt: useBatchFetch ? systemPrompt : nobatchPrompt,
+ from,
+ to,
+ });
args.userPrompt = !!events
? JSON.stringify(events)
: genUserPrompt({
- userPrompt,
+ nobatchUserPrompt,
+ useBatchFetch,
from,
to,
texts,
@@ -721,6 +734,7 @@ export const parseTransRes = async (
history,
userMsg,
apiType,
+ useBatchFetch,
}
) => {
// run the response hook
@@ -811,13 +825,13 @@ export const parseTransRes = async (
content: modelMsg.content,
});
}
- return parseAIRes(modelMsg?.content);
+ return parseAIRes(modelMsg?.content, useBatchFetch);
case OPT_TRANS_GEMINI:
modelMsg = res?.candidates?.[0]?.content;
if (history && userMsg && modelMsg) {
history.add(userMsg, modelMsg);
}
- return parseAIRes(res?.candidates?.[0]?.content?.parts?.[0]?.text ?? "");
+ return parseAIRes(modelMsg?.parts?.[0]?.text ?? "", useBatchFetch);
case OPT_TRANS_CLAUDE:
modelMsg = { role: res?.role, content: res?.content?.text };
if (history && userMsg && modelMsg) {
@@ -826,7 +840,7 @@ export const parseTransRes = async (
content: modelMsg.content,
});
}
- return parseAIRes(res?.content?.[0]?.text ?? "");
+ return parseAIRes(res?.content?.[0]?.text ?? "", useBatchFetch);
case OPT_TRANS_CLOUDFLAREAI:
return [[res?.result?.translated_text]];
case OPT_TRANS_OLLAMA:
@@ -845,7 +859,7 @@ export const parseTransRes = async (
content: modelMsg.content,
});
}
- return parseAIRes(modelMsg?.content);
+ return parseAIRes(modelMsg?.content, useBatchFetch);
case OPT_TRANS_CUSTOMIZE:
return (res?.translations ?? res)?.map((item) => [item.text, item.src]);
default:
diff --git a/src/config/api.js b/src/config/api.js
index a21c8c5..0e6e03d 100644
--- a/src/config/api.js
+++ b/src/config/api.js
@@ -340,6 +340,9 @@ Object.entries(OPT_LANGS_TO_SPEC).forEach(([t, m]) => {
OPT_LANGS_TO_CODE[t] = specToCode(m);
});
+export const defaultNobatchPrompt = `You are a professional, authentic machine translation engine.`;
+export const defaultNobatchUserPrompt = `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output the translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`;
+
export const defaultSystemPrompt = `Act as a translation API. Output a single raw JSON object only. No extra text or fences.
Input:
@@ -430,6 +433,8 @@ const defaultApi = {
model: "", // 模型名称
systemPrompt: defaultSystemPrompt,
subtitlePrompt: defaultSubtitlePrompt,
+ nobatchPrompt: defaultNobatchPrompt,
+ nobatchUserPrompt: defaultNobatchUserPrompt,
userPrompt: "",
tone: BUILTIN_STONES[0], // translation style
placeholder: BUILTIN_PLACEHOLDERS[0], // placeholder
diff --git a/src/hooks/Alert.js b/src/hooks/Alert.js
index 2cabcb7..85b9c2f 100644
--- a/src/hooks/Alert.js
+++ b/src/hooks/Alert.js
@@ -62,7 +62,7 @@ export function AlertProvider({ children }) {