feat: support AI models without batch requests

Author: Gabe
Date: 2025-10-31 14:36:33 +08:00
Parent: 53d441b3f5
Commit: 0a4a2b46c1
4 changed files with 94 additions and 40 deletions


@@ -22,12 +22,14 @@ import {
API_SPE_TYPES,
INPUT_PLACE_FROM,
INPUT_PLACE_TO,
// INPUT_PLACE_TEXT,
INPUT_PLACE_TEXT,
INPUT_PLACE_KEY,
INPUT_PLACE_MODEL,
DEFAULT_USER_AGENT,
defaultSystemPrompt,
defaultSubtitlePrompt,
defaultNobatchPrompt,
defaultNobatchUserPrompt,
} from "../config";
import { msAuth } from "../libs/auth";
import { genDeeplFree } from "./deepl";
@@ -66,38 +68,41 @@ const genSystemPrompt = ({ systemPrompt, from, to }) =>
.replaceAll(INPUT_PLACE_TO, to);
const genUserPrompt = ({
// userPrompt,
nobatchUserPrompt,
useBatchFetch,
tone,
glossary = {},
// from,
from,
to,
texts,
docInfo,
}) => {
const prompt = JSON.stringify({
targetLanguage: to,
title: docInfo.title,
description: docInfo.description,
segments: texts.map((text, i) => ({ id: i, text })),
glossary,
tone,
});
if (useBatchFetch) {
return JSON.stringify({
targetLanguage: to,
title: docInfo.title,
description: docInfo.description,
segments: texts.map((text, i) => ({ id: i, text })),
glossary,
tone,
});
}
// if (userPrompt.includes(INPUT_PLACE_TEXT)) {
// return userPrompt
// .replaceAll(INPUT_PLACE_FROM, from)
// .replaceAll(INPUT_PLACE_TO, to)
// .replaceAll(INPUT_PLACE_TEXT, prompt);
// }
return prompt;
return nobatchUserPrompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
.replaceAll(INPUT_PLACE_TEXT, texts[0]);
};
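
In effect, genUserPrompt now has two paths: batch mode serializes every segment into one JSON payload, while no-batch mode substitutes a single segment into the user-prompt template. A trimmed, standalone sketch of the two paths (the placeholder token values are assumptions; glossary, tone, and document metadata are omitted):

const INPUT_PLACE_FROM = "{{from}}"; // assumed token values; the real ones live in ../config
const INPUT_PLACE_TO = "{{to}}";
const INPUT_PLACE_TEXT = "{{text}}";

const genUserPromptSketch = ({ nobatchUserPrompt, useBatchFetch, from, to, texts }) => {
  if (useBatchFetch) {
    // One request carries every segment, keyed by index.
    return JSON.stringify({
      targetLanguage: to,
      segments: texts.map((text, i) => ({ id: i, text })),
    });
  }
  // No-batch mode sends one request per segment, so only texts[0] is used here.
  return nobatchUserPrompt
    .replaceAll(INPUT_PLACE_FROM, from)
    .replaceAll(INPUT_PLACE_TO, to)
    .replaceAll(INPUT_PLACE_TEXT, texts[0]);
};
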
const parseAIRes = (raw) => {
const parseAIRes = (raw, useBatchFetch = true) => {
if (!raw) {
return [];
}
if (!useBatchFetch) {
return [[raw]];
}
try {
const jsonString = extractJson(raw);
if (!jsonString) return [];
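
A usage note on the new parseAIRes flag: with useBatchFetch disabled, JSON extraction is skipped entirely and the raw completion is wrapped as a single result. Illustrative calls (output values hypothetical):

parseAIRes("", false);           // => [] — empty input short-circuits either way
parseAIRes("你好，世界", false);  // => [["你好，世界"]] — raw text, no JSON parsing
// In batch mode (the default), the raw output still goes through extractJson() first.
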
@@ -627,7 +632,10 @@ export const genTransReq = async ({ reqHook, ...args }) => {
apiSlug,
key,
systemPrompt,
userPrompt,
// userPrompt,
nobatchPrompt = defaultNobatchPrompt,
nobatchUserPrompt = defaultNobatchUserPrompt,
useBatchFetch,
from,
to,
texts,
@@ -647,11 +655,16 @@ export const genTransReq = async ({ reqHook, ...args }) => {
}
if (API_SPE_TYPES.ai.has(apiType)) {
args.systemPrompt = genSystemPrompt({ systemPrompt, from, to });
args.systemPrompt = genSystemPrompt({
systemPrompt: useBatchFetch ? systemPrompt : nobatchPrompt,
from,
to,
});
args.userPrompt = !!events
? JSON.stringify(events)
: genUserPrompt({
userPrompt,
nobatchUserPrompt,
useBatchFetch,
from,
to,
texts,
@@ -721,6 +734,7 @@ export const parseTransRes = async (
history,
userMsg,
apiType,
useBatchFetch,
}
) => {
// run the response hook
@@ -811,13 +825,13 @@ export const parseTransRes = async (
content: modelMsg.content,
});
}
return parseAIRes(modelMsg?.content);
return parseAIRes(modelMsg?.content, useBatchFetch);
case OPT_TRANS_GEMINI:
modelMsg = res?.candidates?.[0]?.content;
if (history && userMsg && modelMsg) {
history.add(userMsg, modelMsg);
}
return parseAIRes(res?.candidates?.[0]?.content?.parts?.[0]?.text ?? "");
return parseAIRes(modelMsg?.parts?.[0]?.text ?? "", useBatchFetch);
case OPT_TRANS_CLAUDE:
modelMsg = { role: res?.role, content: res?.content?.text };
if (history && userMsg && modelMsg) {
@@ -826,7 +840,7 @@ export const parseTransRes = async (
content: modelMsg.content,
});
}
return parseAIRes(res?.content?.[0]?.text ?? "");
return parseAIRes(res?.content?.[0]?.text ?? "", useBatchFetch);
case OPT_TRANS_CLOUDFLAREAI:
return [[res?.result?.translated_text]];
case OPT_TRANS_OLLAMA:
@@ -845,7 +859,7 @@ export const parseTransRes = async (
content: modelMsg.content,
});
}
return parseAIRes(modelMsg?.content);
return parseAIRes(modelMsg?.content, useBatchFetch);
case OPT_TRANS_CUSTOMIZE:
return (res?.translations ?? res)?.map((item) => [item.text, item.src]);
default:
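
Taken together, useBatchFetch now threads through the whole request cycle. A rough sketch of that flow (hypothetical driver; the transport helper and argument shapes are simplified assumptions, not the project's actual API):

const translateOnce = async (args) => {
  const req = await genTransReq(args); // selects nobatchPrompt/nobatchUserPrompt when useBatchFetch is false
  const res = await sendRequest(req);  // hypothetical transport helper
  return parseTransRes(res, {
    apiType: args.apiType,
    useBatchFetch: args.useBatchFetch, // forwarded so parseAIRes knows how to unwrap the result
  });
};
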


@@ -340,6 +340,9 @@ Object.entries(OPT_LANGS_TO_SPEC).forEach(([t, m]) => {
OPT_LANGS_TO_CODE[t] = specToCode(m);
});
export const defaultNobatchPrompt = `You are a professional, authentic machine translation engine.`;
export const defaultNobatchUserPrompt = `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`;
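
For illustration, a hypothetical en → zh-CN request for "hello world" would render the no-batch user prompt roughly as follows (sketch derived from the template above):

const rendered = defaultNobatchUserPrompt
  .replaceAll(INPUT_PLACE_FROM, "en")
  .replaceAll(INPUT_PLACE_TO, "zh-CN")
  .replaceAll(INPUT_PLACE_TEXT, "hello world");
// => "Translate the following source text from en to zh-CN. Output translation
//     directly without any additional text.\n\nSource Text: hello world\n\nTranslated Text:"
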
export const defaultSystemPrompt = `Act as a translation API. Output a single raw JSON object only. No extra text or fences.
Input:
@@ -430,6 +433,8 @@ const defaultApi = {
model: "", // 模型名称
systemPrompt: defaultSystemPrompt,
subtitlePrompt: defaultSubtitlePrompt,
nobatchPrompt: defaultNobatchPrompt,
nobatchUserPrompt: defaultNobatchUserPrompt,
userPrompt: "",
tone: BUILTIN_STONES[0], // translation tone
placeholder: BUILTIN_PLACEHOLDERS[0], // placeholder


@@ -62,7 +62,7 @@ export function AlertProvider({ children }) {
<Alert
onClose={handleClose}
severity={severity}
sx={{ maxWidth: "80%" }}
sx={{ minWidth: "300px", maxWidth: "80%" }}
>
{message}
</Alert>


@@ -44,6 +44,8 @@ import {
BUILTIN_PLACEHOLDERS,
BUILTIN_PLACETAGS,
OPT_TRANS_AZUREAI,
defaultNobatchPrompt,
defaultNobatchUserPrompt,
} from "../../config";
import ValidationInput from "../../hooks/ValidationInput";
@@ -54,8 +56,9 @@ function TestButton({ api }) {
const handleApiTest = async () => {
try {
setLoading(true);
const text = "hello world";
const { trText } = await apiTranslate({
text: "hello world",
text,
fromLang: "en",
toLang: "zh-CN",
apiSetting: { ...api },
@@ -65,7 +68,13 @@ function TestButton({ api }) {
if (!trText) {
throw new Error("empty result");
}
alert.success(i18n("test_success"));
alert.success(
<>
<div>{i18n("test_success")}</div>
<div>{text}</div>
<div>{trText}</div>
</>
);
} catch (err) {
// alert.error(`${i18n("test_failed")}: ${err.message}`);
let msg = err.message;
@@ -164,6 +173,8 @@ function ApiFields({ apiSlug, isUserApi, deleteApi }) {
model = "",
apiType,
systemPrompt = "",
nobatchPrompt = defaultNobatchPrompt,
nobatchUserPrompt = defaultNobatchUserPrompt,
subtitlePrompt = "",
// userPrompt = "",
customHeader = "",
@@ -305,16 +316,40 @@ function ApiFields({ apiSlug, isUserApi, deleteApi }) {
</Grid>
</Box>
<TextField
size="small"
label={"SYSTEM PROMPT"}
name="systemPrompt"
value={systemPrompt}
onChange={handleChange}
multiline
maxRows={10}
helperText={i18n("system_prompt_helper")}
/>
{useBatchFetch ? (
<TextField
size="small"
label={"BATCH SYSTEM PROMPT"}
name="systemPrompt"
value={systemPrompt}
onChange={handleChange}
multiline
maxRows={10}
helperText={i18n("system_prompt_helper")}
/>
) : (
<>
<TextField
size="small"
label={"SYSTEM PROMPT"}
name="nobatchPrompt"
value={nobatchPrompt}
onChange={handleChange}
multiline
maxRows={10}
/>
<TextField
size="small"
label={"USER PROMPT"}
name="nobatchUserPrompt"
value={nobatchUserPrompt}
onChange={handleChange}
multiline
maxRows={10}
/>
</>
)}
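
Both new fields reuse the controlled-input pattern already in place: the name attribute selects which API setting gets written. A minimal sketch of the kind of handler this implies (hypothetical; updateApi stands in for whatever setter ApiFields actually uses):

const handleChange = (e) => {
  const { name, value } = e.target;
  updateApi({ [name]: value }); // e.g. { nobatchUserPrompt: "..." }
};
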
<TextField
size="small"
label={"SUBTITLE PROMPT"}