diff --git a/src/apis/index.js b/src/apis/index.js
index 378ca4a..f95bd09 100644
--- a/src/apis/index.js
+++ b/src/apis/index.js
@@ -13,6 +13,7 @@ import {
OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI,
+ OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA,
OPT_TRANS_OLLAMA_2,
@@ -311,6 +312,10 @@ export const apiTranslate = async ({
.join(" ");
isSame = text === trText;
break;
+ case OPT_TRANS_CLAUDE:
+ trText = res?.content?.map((item) => item.text).join(" ");
+ isSame = text === trText;
+ break;
case OPT_TRANS_CLOUDFLAREAI:
trText = res?.result?.translated_text;
isSame = text === trText;
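
For reference, the new OPT_TRANS_CLAUDE branch above assumes the standard Anthropic Messages API response shape. A minimal sketch of the res it parses (field values are illustrative, not taken from this PR):

    // Illustrative /v1/messages response (values are examples only):
    const res = {
      id: "msg_...",
      type: "message",
      role: "assistant",
      model: "claude-3-haiku-20240307",
      content: [{ type: "text", text: "Bonjour le monde" }],
      stop_reason: "end_turn",
      usage: { input_tokens: 42, output_tokens: 7 },
    };
    // res?.content?.map((item) => item.text).join(" ") -> "Bonjour le monde"
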
diff --git a/src/apis/trans.js b/src/apis/trans.js
index ec34489..3e01d7c 100644
--- a/src/apis/trans.js
+++ b/src/apis/trans.js
@@ -12,6 +12,7 @@ import {
OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI,
+ OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA,
OPT_TRANS_OLLAMA_2,
@@ -262,6 +263,49 @@ const genGemini = ({ text, from, to, url, key, prompt, model }) => {
return [url, init];
};
+const genClaude = ({
+ text,
+ from,
+ to,
+ url,
+ key,
+ systemPrompt,
+ prompt,
+ model,
+ temperature,
+ maxTokens,
+}) => {
+ prompt = prompt
+ .replaceAll(INPUT_PLACE_FROM, from)
+ .replaceAll(INPUT_PLACE_TO, to)
+ .replaceAll(INPUT_PLACE_TEXT, text);
+
+ const data = {
+ model,
+ system: systemPrompt,
+ messages: [
+ {
+ role: "user",
+ content: prompt,
+ },
+ ],
+ temperature,
+ max_tokens: maxTokens,
+ };
+
+ const init = {
+ headers: {
+ "Content-type": "application/json",
+ "anthropic-version": "2023-06-01",
+ "x-api-key": key,
+ },
+ method: "POST",
+ body: JSON.stringify(data),
+ };
+
+ return [url, init];
+};
+
const genOllama = ({ text, from, to, url, key, prompt, model }) => {
prompt = prompt
.replaceAll(INPUT_PLACE_FROM, from)
@@ -355,6 +399,7 @@ export const genTransReq = ({ translator, text, from, to }, apiSetting) => {
case OPT_TRANS_OPENAI_2:
case OPT_TRANS_OPENAI_3:
case OPT_TRANS_GEMINI:
+ case OPT_TRANS_CLAUDE:
case OPT_TRANS_CLOUDFLAREAI:
case OPT_TRANS_OLLAMA:
case OPT_TRANS_OLLAMA_2:
@@ -391,6 +436,8 @@ export const genTransReq = ({ translator, text, from, to }, apiSetting) => {
return genOpenAI(args);
case OPT_TRANS_GEMINI:
return genGemini(args);
+ case OPT_TRANS_CLAUDE:
+ return genClaude(args);
case OPT_TRANS_CLOUDFLAREAI:
return genCloudflareAI(args);
case OPT_TRANS_OLLAMA:
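
Putting the pieces together: with the defaults added to src/config/index.js below, genClaude builds a request along these lines (a sketch; the example language pair and source text are placeholders, and the exact user message depends on how INPUT_PLACE_FROM/TO/TEXT are substituted at call time):

    // POST https://api.anthropic.com/v1/messages
    // headers: Content-type: application/json, anthropic-version: 2023-06-01, x-api-key: <user key>
    {
      "model": "claude-3-haiku-20240307",
      "system": "You are a professional, authentic machine translation engine.",
      "messages": [
        {
          "role": "user",
          "content": "Translate the following source text from English to French. ... Source Text: Hello world ..."
        }
      ],
      "temperature": 0,
      "max_tokens": 1024
    }
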
diff --git a/src/config/index.js b/src/config/index.js
index cea947f..104c848 100644
--- a/src/config/index.js
+++ b/src/config/index.js
@@ -116,6 +116,7 @@ export const OPT_TRANS_OPENAI = "OpenAI";
export const OPT_TRANS_OPENAI_2 = "OpenAI2";
export const OPT_TRANS_OPENAI_3 = "OpenAI3";
export const OPT_TRANS_GEMINI = "Gemini";
+export const OPT_TRANS_CLAUDE = "Claude";
export const OPT_TRANS_CLOUDFLAREAI = "CloudflareAI";
export const OPT_TRANS_OLLAMA = "Ollama";
export const OPT_TRANS_OLLAMA_2 = "Ollama2";
@@ -138,6 +139,7 @@ export const OPT_TRANS_ALL = [
OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI,
+ OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA,
OPT_TRANS_OLLAMA_2,
@@ -289,6 +291,9 @@ export const OPT_LANGS_SPECIAL = {
[OPT_TRANS_GEMINI]: new Map(
OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
),
+ [OPT_TRANS_CLAUDE]: new Map(
+ OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
+ ),
[OPT_TRANS_OLLAMA]: new Map(
OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
),
@@ -590,6 +595,17 @@ export const DEFAULT_TRANS_APIS = {
fetchLimit: 1,
fetchInterval: 500,
},
+ [OPT_TRANS_CLAUDE]: {
+ url: "https://api.anthropic.com/v1/messages",
+ key: "",
+ model: "claude-3-haiku-20240307",
+ prompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
+ systemPrompt: `You are a professional, authentic machine translation engine.`,
+ temperature: 0,
+ maxTokens: 1024,
+ fetchLimit: 1,
+ fetchInterval: 500,
+ },
[OPT_TRANS_CLOUDFLAREAI]: {
url: "https://api.cloudflare.com/client/v4/accounts/{{ACCOUNT_ID}}/ai/run/@cf/meta/m2m100-1.2b",
key: "",
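
The OPT_LANGS_SPECIAL entry mirrors the existing Gemini/Ollama mappings, so Claude receives a plain language name rather than an ISO code. A small illustration, assuming OPT_LANGS_FROM entries look like ["zh-CN", "Simplified Chinese - 简体中文"] (an assumption; the list itself is not part of this diff):

    // Assumption: OPT_LANGS_FROM holds pairs like ["zh-CN", "Simplified Chinese - 简体中文"].
    const claudeLangs = new Map(
      OPT_LANGS_FROM.map(([key, val]) => [key, val.split(" - ")[0]])
    );
    claudeLangs.get("zh-CN"); // => "Simplified Chinese", the value substituted for INPUT_PLACE_FROM / INPUT_PLACE_TO
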
diff --git a/src/views/Options/Apis.js b/src/views/Options/Apis.js
index 15ec44a..eed003e 100644
--- a/src/views/Options/Apis.js
+++ b/src/views/Options/Apis.js
@@ -14,6 +14,7 @@ import {
OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI,
+ OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA,
OPT_TRANS_OLLAMA_2,
@@ -115,6 +116,7 @@ function ApiFields({ translator }) {
key = "",
model = "",
prompt = "",
+ systemPrompt = "",
fetchLimit = DEFAULT_FETCH_LIMIT,
fetchInterval = DEFAULT_FETCH_INTERVAL,
dictNo = "",
@@ -160,6 +162,7 @@ function ApiFields({ translator }) {
OPT_TRANS_OPENAI_2,
OPT_TRANS_OPENAI_3,
OPT_TRANS_GEMINI,
+ OPT_TRANS_CLAUDE,
OPT_TRANS_CLOUDFLAREAI,
OPT_TRANS_OLLAMA,
OPT_TRANS_OLLAMA_2,
@@ -212,6 +215,7 @@ function ApiFields({ translator }) {
{(translator.startsWith(OPT_TRANS_OPENAI) ||
translator.startsWith(OPT_TRANS_OLLAMA) ||
+ translator === OPT_TRANS_CLAUDE ||
translator === OPT_TRANS_GEMINI) && (
<>
)}
- {translator.startsWith(OPT_TRANS_OPENAI) && (
+ {translator === OPT_TRANS_CLAUDE && (
+ <>
+
+        </>
+ )}
+
+ {(translator.startsWith(OPT_TRANS_OPENAI) ||
+ translator === OPT_TRANS_CLAUDE) && (
<>
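
The Claude-only block above is elided in this excerpt; presumably it renders an input bound to the new systemPrompt value destructured earlier in ApiFields. A purely hypothetical sketch, modeled on a typical MUI TextField (none of these props or i18n keys are taken from the PR):

    {/* Hypothetical sketch only; the real JSX is elided from this diff. */}
    <TextField
      size="small"
      label={i18n("system_prompt")} // assumed i18n key
      name="systemPrompt"
      value={systemPrompt}
      onChange={handleChange} // assumed change handler
      multiline
    />
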