diff --git a/src/apis/trans.js b/src/apis/trans.js
index 9b5689d..59dd45f 100644
--- a/src/apis/trans.js
+++ b/src/apis/trans.js
@@ -320,7 +320,7 @@ const genClaude = ({
return [url, init];
};
-const genOllama = ({ text, from, to, url, key, prompt, model }) => {
+const genOllama = ({ text, from, to, url, key, system, prompt, model }) => {
prompt = prompt
.replaceAll(INPUT_PLACE_FROM, from)
.replaceAll(INPUT_PLACE_TO, to)
@@ -328,6 +328,7 @@ const genOllama = ({ text, from, to, url, key, prompt, model }) => {
const data = {
model,
+ system,
prompt,
stream: false,
};
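// Editor sketch, not part of the patch: with `system` forwarded into the request body
// above, genOllama posts JSON like the following to Ollama's /api/generate endpoint.
// The field names match Ollama's generate API; the example values and the header
// handling (e.g. how `key` is attached) are illustrative assumptions.
const exampleOllamaRequest = () => {
  const data = {
    model: "llama3.1",
    system: "You are a professional, authentic machine translation engine.",
    prompt:
      "Translate the following text from English to Chinese, output the translation directly without any additional text:\n\nHello world",
    stream: false, // request a single JSON reply; the translation comes back in `response`
  };
  const init = {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(data),
  };
  return ["http://localhost:11434/api/generate", init];
};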
diff --git a/src/config/index.js b/src/config/index.js
index f6ae28e..83e0a5a 100644
--- a/src/config/index.js
+++ b/src/config/index.js
@@ -537,8 +537,9 @@ const defaultOpenaiApi = {
const defaultOllamaApi = {
url: "http://localhost:11434/api/generate",
key: "",
- model: "llama3",
- prompt: `Translate the following text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}:\n\n${INPUT_PLACE_TEXT}`,
+ model: "llama3.1",
+ system:"You are a professional, authentic machine translation engine.",
+ prompt: `Translate the following text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO},output translation directly without any additional text:\n\n${INPUT_PLACE_TEXT}`,
fetchLimit: 1,
fetchInterval: 500,
};
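// Editor sketch, not part of the patch: genOllama fills the placeholders in this default
// prompt via replaceAll before the request is sent, so with illustrative values
// from = "English", to = "Chinese", text = "Hello world" the model receives:
const filledPromptExample =
  "Translate the following text from English to Chinese, output the translation directly without any additional text:\n\nHello world";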
diff --git a/src/views/Options/Apis.js b/src/views/Options/Apis.js
index e8b2b55..6d8a650 100644
--- a/src/views/Options/Apis.js
+++ b/src/views/Options/Apis.js
@@ -115,6 +115,7 @@ function ApiFields({ translator }) {
url = "",
key = "",
model = "",
+ system = "",
prompt = "",
systemPrompt = "",
fetchLimit = DEFAULT_FETCH_LIMIT,
@@ -214,7 +215,6 @@ function ApiFields({ translator }) {
)}
{(translator.startsWith(OPT_TRANS_OPENAI) ||
- translator.startsWith(OPT_TRANS_OLLAMA) ||
translator === OPT_TRANS_CLAUDE ||
translator === OPT_TRANS_GEMINI) && (
<>
@@ -236,6 +236,34 @@ function ApiFields({ translator }) {
/>
</>
)}
+
+ {translator.startsWith(OPT_TRANS_OLLAMA) && (
+ <>
+
+
+
+ </>
+ )}
{(translator.startsWith(OPT_TRANS_OPENAI) ||
translator === OPT_TRANS_CLAUDE) && (
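// Editor sketch, hypothetical: the hunk above truncates the JSX it adds for the new
// Ollama-only block, so the fields are sketched here rather than filled into the diff.
// Presumably the block renders inputs bound to the `system` and `prompt` values
// destructured earlier; the MUI TextField props, label helper i18n(), and handleChange
// handler are assumptions based on the surrounding file, not taken from the patch.
//
// {translator.startsWith(OPT_TRANS_OLLAMA) && (
//   <>
//     <TextField
//       size="small"
//       label={i18n("system_prompt")}
//       name="system"
//       value={system}
//       onChange={handleChange}
//       multiline
//     />
//     <TextField
//       size="small"
//       label={i18n("prompt")}
//       name="prompt"
//       value={prompt}
//       onChange={handleChange}
//       multiline
//     />
//   </>
// )}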