feat: custom request

Gabe Yuan
2024-04-17 17:38:54 +08:00
parent efc51b0d46
commit e369321c66
3 changed files with 56 additions and 14 deletions

View File

@@ -355,9 +355,11 @@ export const OPT_TIMING_ALL = [
 export const DEFAULT_FETCH_LIMIT = 10; // default maximum number of tasks
 export const DEFAULT_FETCH_INTERVAL = 100; // default interval between tasks
-export const PROMPT_PLACE_FROM = "{{from}}"; // placeholder
-export const PROMPT_PLACE_TO = "{{to}}"; // placeholder
-export const PROMPT_PLACE_TEXT = "{{text}}"; // placeholder
+export const INPUT_PLACE_URL = "{{url}}"; // placeholder
+export const INPUT_PLACE_FROM = "{{from}}"; // placeholder
+export const INPUT_PLACE_TO = "{{to}}"; // placeholder
+export const INPUT_PLACE_TEXT = "{{text}}"; // placeholder
+export const INPUT_PLACE_KEY = "{{key}}"; // placeholder
 export const DEFAULT_COLOR = "#209CEE"; // default highlight background color / line color
@@ -456,6 +458,7 @@ export const DEFAULT_SUBRULES_LIST = [
 const defaultCustomApi = {
   url: "",
   key: "",
+  customRequest: "",
   fetchLimit: DEFAULT_FETCH_LIMIT,
   fetchInterval: DEFAULT_FETCH_INTERVAL,
 };
@@ -506,7 +509,7 @@ export const DEFAULT_TRANS_APIS = {
url: "https://api.openai.com/v1/chat/completions",
key: "",
model: "gpt-4",
prompt: `You will be provided with a sentence in ${PROMPT_PLACE_FROM}, and your task is to translate it into ${PROMPT_PLACE_TO}.`,
prompt: `You will be provided with a sentence in ${INPUT_PLACE_FROM}, and your task is to translate it into ${INPUT_PLACE_TO}.`,
fetchLimit: 1,
fetchInterval: 500,
},
@@ -514,7 +517,7 @@ export const DEFAULT_TRANS_APIS = {
url: "https://generativelanguage.googleapis.com/v1/models",
key: "",
model: "gemini-pro",
prompt: `Translate the following text from ${PROMPT_PLACE_FROM} to ${PROMPT_PLACE_TO}:\n\n${PROMPT_PLACE_TEXT}`,
prompt: `Translate the following text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}:\n\n${INPUT_PLACE_TEXT}`,
fetchLimit: 1,
fetchInterval: 500,
},
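
For illustration only (not part of this commit): once the new customRequest field is filled in, a configured custom engine entry could look like the sketch below. The endpoint, header names, and body shape are hypothetical; the {{...}} tokens are the placeholder constants defined above and are substituted at request time.

// Hypothetical example of a configured custom engine entry; not defined by this commit.
const exampleCustomApi = {
  url: "https://translate.example.com/api", // hypothetical endpoint
  key: "YOUR_API_KEY",
  customRequest: JSON.stringify({
    url: "{{url}}",
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: "Bearer {{key}}",
    },
    body: { text: "{{text}}", from: "{{from}}", to: "{{to}}" },
  }),
  fetchLimit: DEFAULT_FETCH_LIMIT,
  fetchInterval: DEFAULT_FETCH_INTERVAL,
};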

View File

@@ -18,13 +18,16 @@ import {
   OPT_TRANS_CUSTOMIZE_5,
   URL_MICROSOFT_TRAN,
   URL_TENCENT_TRANSMART,
-  PROMPT_PLACE_FROM,
-  PROMPT_PLACE_TO,
-  PROMPT_PLACE_TEXT,
+  INPUT_PLACE_URL,
+  INPUT_PLACE_FROM,
+  INPUT_PLACE_TO,
+  INPUT_PLACE_TEXT,
+  INPUT_PLACE_KEY,
 } from "../config";
 import { msAuth } from "./auth";
 import { genDeeplFree } from "../apis/deepl";
 import { genBaidu } from "../apis/baidu";
+import { kissLog } from "./log";
 const keyMap = new Map();
@@ -194,8 +197,8 @@ const genTencent = ({ text, from, to }) => {
 const genOpenAI = ({ text, from, to, url, key, prompt, model }) => {
   prompt = prompt
-    .replaceAll(PROMPT_PLACE_FROM, from)
-    .replaceAll(PROMPT_PLACE_TO, to);
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to);
   const data = {
     model,
@@ -228,9 +231,9 @@ const genOpenAI = ({ text, from, to, url, key, prompt, model }) => {
 const genGemini = ({ text, from, to, url, key, prompt, model }) => {
   prompt = prompt
-    .replaceAll(PROMPT_PLACE_FROM, from)
-    .replaceAll(PROMPT_PLACE_TO, to)
-    .replaceAll(PROMPT_PLACE_TEXT, text);
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to)
+    .replaceAll(INPUT_PLACE_TEXT, text);
   const data = {
     contents: [
@@ -276,7 +279,14 @@ const genCloudflareAI = ({ text, from, to, url, key }) => {
   return [url, init];
 };
-const genCustom = ({ text, from, to, url, key }) => {
+const genCustom = ({ text, from, to, url, key, customRequest = "" }) => {
+  const replaceInput = (str) =>
+    str
+      .replaceAll(INPUT_PLACE_URL, url)
+      .replaceAll(INPUT_PLACE_FROM, from)
+      .replaceAll(INPUT_PLACE_TO, to)
+      .replaceAll(INPUT_PLACE_TEXT, text)
+      .replaceAll(INPUT_PLACE_KEY, key);
   const data = {
     text,
     from,
@@ -292,6 +302,23 @@ const genCustom = ({ text, from, to, url, key }) => {
   if (key) {
     init.headers.Authorization = `Bearer ${key}`;
   }
+  url = replaceInput(url);
+  if (customRequest.trim()) {
+    try {
+      const req = JSON.parse(replaceInput(customRequest));
+      req.url && (url = req.url);
+      req.headers && (init.headers = req.headers);
+      req.method && (init.method = req.method);
+      if (init.method === "GET") {
+        delete init.body;
+      } else {
+        req.body && (init.body = JSON.stringify(req.body));
+      }
+    } catch (err) {
+      kissLog(err, "parse custom request");
+    }
+  }
   return [url, init];
 };
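
As a usage sketch of the parsing above (the query parameters are hypothetical): the {{...}} tokens are substituted before JSON.parse, and any url, method, headers, or body fields in the parsed template then override the defaults. Pasting a template such as the following into the new CUSTOM REQUEST field:

{
  "url": "{{url}}?sl={{from}}&tl={{to}}&q={{text}}",
  "method": "GET",
  "headers": { "Authorization": "Bearer {{key}}" }
}

would issue a GET request to the substituted URL with only the given Authorization header. Because the method is GET, the default JSON body is dropped, and a headers field replaces the default headers rather than merging with them.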

View File

@@ -113,6 +113,7 @@ function ApiFields({ translator }) {
     fetchInterval = DEFAULT_FETCH_INTERVAL,
     dictNo = "",
     memoryNo = "",
+    customRequest = "",
   } = api;
   const handleChange = (e) => {
@@ -222,6 +223,17 @@ function ApiFields({ translator }) {
         </>
       )}
+      {translator.startsWith(OPT_TRANS_CUSTOMIZE) && (
+        <TextField
+          size="small"
+          label={"CUSTOM REQUEST"}
+          name="customRequest"
+          value={customRequest}
+          onChange={handleChange}
+          multiline
+        />
+      )}
       <TextField
         size="small"
         label={i18n("fetch_limit")}