feat: custom request
@@ -355,9 +355,11 @@ export const OPT_TIMING_ALL = [
 export const DEFAULT_FETCH_LIMIT = 10; // default maximum number of tasks
 export const DEFAULT_FETCH_INTERVAL = 100; // default task interval
 
-export const PROMPT_PLACE_FROM = "{{from}}"; // placeholder
-export const PROMPT_PLACE_TO = "{{to}}"; // placeholder
-export const PROMPT_PLACE_TEXT = "{{text}}"; // placeholder
+export const INPUT_PLACE_URL = "{{url}}"; // placeholder
+export const INPUT_PLACE_FROM = "{{from}}"; // placeholder
+export const INPUT_PLACE_TO = "{{to}}"; // placeholder
+export const INPUT_PLACE_TEXT = "{{text}}"; // placeholder
+export const INPUT_PLACE_KEY = "{{key}}"; // placeholder
 
 export const DEFAULT_COLOR = "#209CEE"; // default highlight background / line color
 
@@ -456,6 +458,7 @@ export const DEFAULT_SUBRULES_LIST = [
 const defaultCustomApi = {
   url: "",
   key: "",
+  customRequest: "",
   fetchLimit: DEFAULT_FETCH_LIMIT,
   fetchInterval: DEFAULT_FETCH_INTERVAL,
 };
@@ -506,7 +509,7 @@ export const DEFAULT_TRANS_APIS = {
     url: "https://api.openai.com/v1/chat/completions",
     key: "",
     model: "gpt-4",
-    prompt: `You will be provided with a sentence in ${PROMPT_PLACE_FROM}, and your task is to translate it into ${PROMPT_PLACE_TO}.`,
+    prompt: `You will be provided with a sentence in ${INPUT_PLACE_FROM}, and your task is to translate it into ${INPUT_PLACE_TO}.`,
     fetchLimit: 1,
     fetchInterval: 500,
   },
@@ -514,7 +517,7 @@ export const DEFAULT_TRANS_APIS = {
     url: "https://generativelanguage.googleapis.com/v1/models",
     key: "",
     model: "gemini-pro",
-    prompt: `Translate the following text from ${PROMPT_PLACE_FROM} to ${PROMPT_PLACE_TO}:\n\n${PROMPT_PLACE_TEXT}`,
+    prompt: `Translate the following text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}:\n\n${INPUT_PLACE_TEXT}`,
     fetchLimit: 1,
     fetchInterval: 500,
   },
@@ -18,13 +18,16 @@ import {
   OPT_TRANS_CUSTOMIZE_5,
   URL_MICROSOFT_TRAN,
   URL_TENCENT_TRANSMART,
-  PROMPT_PLACE_FROM,
-  PROMPT_PLACE_TO,
-  PROMPT_PLACE_TEXT,
+  INPUT_PLACE_URL,
+  INPUT_PLACE_FROM,
+  INPUT_PLACE_TO,
+  INPUT_PLACE_TEXT,
+  INPUT_PLACE_KEY,
 } from "../config";
 import { msAuth } from "./auth";
 import { genDeeplFree } from "../apis/deepl";
 import { genBaidu } from "../apis/baidu";
+import { kissLog } from "./log";
 
 const keyMap = new Map();
 
@@ -194,8 +197,8 @@ const genTencent = ({ text, from, to }) => {
 
 const genOpenAI = ({ text, from, to, url, key, prompt, model }) => {
   prompt = prompt
-    .replaceAll(PROMPT_PLACE_FROM, from)
-    .replaceAll(PROMPT_PLACE_TO, to);
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to);
 
   const data = {
     model,
@@ -228,9 +231,9 @@ const genOpenAI = ({ text, from, to, url, key, prompt, model }) => {
 
 const genGemini = ({ text, from, to, url, key, prompt, model }) => {
   prompt = prompt
-    .replaceAll(PROMPT_PLACE_FROM, from)
-    .replaceAll(PROMPT_PLACE_TO, to)
-    .replaceAll(PROMPT_PLACE_TEXT, text);
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to)
+    .replaceAll(INPUT_PLACE_TEXT, text);
 
   const data = {
     contents: [
@@ -276,7 +279,14 @@ const genCloudflareAI = ({ text, from, to, url, key }) => {
   return [url, init];
 };
 
-const genCustom = ({ text, from, to, url, key }) => {
+const genCustom = ({ text, from, to, url, key, customRequest = "" }) => {
+  const replaceInput = (str) =>
+    str
+      .replaceAll(INPUT_PLACE_URL, url)
+      .replaceAll(INPUT_PLACE_FROM, from)
+      .replaceAll(INPUT_PLACE_TO, to)
+      .replaceAll(INPUT_PLACE_TEXT, text)
+      .replaceAll(INPUT_PLACE_KEY, key);
   const data = {
     text,
     from,
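For reference, a minimal standalone sketch of how the new placeholders resolve; the sample endpoint, languages, key, and template string below are hypothetical, only the {{...}} tokens and the replaceAll chain come from this diff:

// Standalone sketch, not part of the commit; sample values are hypothetical.
const INPUT_PLACE_URL = "{{url}}";
const INPUT_PLACE_FROM = "{{from}}";
const INPUT_PLACE_TO = "{{to}}";
const INPUT_PLACE_TEXT = "{{text}}";
const INPUT_PLACE_KEY = "{{key}}";

const replaceInput = (str, { url, from, to, text, key }) =>
  str
    .replaceAll(INPUT_PLACE_URL, url)
    .replaceAll(INPUT_PLACE_FROM, from)
    .replaceAll(INPUT_PLACE_TO, to)
    .replaceAll(INPUT_PLACE_TEXT, text)
    .replaceAll(INPUT_PLACE_KEY, key);

console.log(
  replaceInput("POST {{url}} key={{key}} :: {{from}}->{{to}} :: {{text}}", {
    url: "https://example.com/translate", // hypothetical endpoint
    from: "en",
    to: "zh-CN",
    text: "hello",
    key: "sk-demo", // hypothetical key
  })
);
// → "POST https://example.com/translate key=sk-demo :: en->zh-CN :: hello"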
@@ -292,6 +302,23 @@ const genCustom = ({ text, from, to, url, key }) => {
   if (key) {
     init.headers.Authorization = `Bearer ${key}`;
   }
+  url = replaceInput(url);
+
+  if (customRequest.trim()) {
+    try {
+      const req = JSON.parse(replaceInput(customRequest));
+      req.url && (url = req.url);
+      req.headers && (init.headers = req.headers);
+      req.method && (init.method = req.method);
+      if (init.method === "GET") {
+        delete init.body;
+      } else {
+        req.body && (init.body = JSON.stringify(req.body));
+      }
+    } catch (err) {
+      kissLog(err, "parse custom request");
+    }
+  }
 
   return [url, init];
 };
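To make the hunk above concrete: after placeholder substitution, the custom request string is parsed as JSON and may override url, headers, method, and body (the body is dropped for GET, otherwise re-stringified). A hypothetical value for the field might look like this; the url/method/headers/body keys and the {{...}} placeholders come from this commit, while the body field names (q, source, target) are assumptions about a downstream API:

{
  "url": "{{url}}",
  "method": "POST",
  "headers": {
    "Content-Type": "application/json",
    "Authorization": "Bearer {{key}}"
  },
  "body": {
    "q": "{{text}}",
    "source": "{{from}}",
    "target": "{{to}}"
  }
}

With a template like this, genCustom substitutes the placeholders, parses the result, sends a POST to the substituted url with the given headers, and JSON.stringifies the body; if method were "GET", the body would be removed instead.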
@@ -113,6 +113,7 @@ function ApiFields({ translator }) {
     fetchInterval = DEFAULT_FETCH_INTERVAL,
     dictNo = "",
     memoryNo = "",
+    customRequest = "",
   } = api;
 
   const handleChange = (e) => {
@@ -222,6 +223,17 @@ function ApiFields({ translator }) {
         </>
       )}
 
+      {translator.startsWith(OPT_TRANS_CUSTOMIZE) && (
+        <TextField
+          size="small"
+          label={"CUSTOM REQUEST"}
+          name="customRequest"
+          value={customRequest}
+          onChange={handleChange}
+          multiline
+        />
+      )}
+
       <TextField
         size="small"
         label={i18n("fetch_limit")}
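Usage note for the field added above: the CUSTOM REQUEST input appears only for translators whose id starts with OPT_TRANS_CUSTOMIZE, is multiline, and is saved on the API config as customRequest through handleChange. Whatever JSON template is pasted there (for example, the hypothetical one shown after the genCustom hunk) is what genCustom later substitutes and parses; leaving it empty keeps the previous default request behavior.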