fix: modify systemPrompt & userPrompt

Gabe
2024-11-30 00:41:29 +08:00
parent 121d523e02
commit 6855332092
3 changed files with 58 additions and 75 deletions


@@ -193,23 +193,23 @@ const genOpenAI = ({
   url,
   key,
   systemPrompt,
-  prompt,
+  userPrompt,
   model,
   temperature,
   maxTokens,
 }) => {
   // Compatibility with the legacy prompt that doubled as the systemPrompt: if the prompt does not contain the text to be translated, append it to the end of the prompt
-  if (!prompt.includes(INPUT_PLACE_TEXT)) {
-    prompt += `\nSource Text: ${INPUT_PLACE_TEXT}`;
-  }
-  prompt = prompt
+  // if (!prompt.includes(INPUT_PLACE_TEXT)) {
+  //   prompt += `\nSource Text: ${INPUT_PLACE_TEXT}`;
+  // }
+  systemPrompt = systemPrompt
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to)
+    .replaceAll(INPUT_PLACE_TEXT, text);
+  userPrompt = userPrompt
     .replaceAll(INPUT_PLACE_FROM, from)
     .replaceAll(INPUT_PLACE_TO, to)
     .replaceAll(INPUT_PLACE_TEXT, text);
-  systemPrompt = systemPrompt
-    .replaceAll(INPUT_PLACE_FROM, from)
-    .replaceAll(INPUT_PLACE_TO, to)
-    .replaceAll(INPUT_PLACE_TEXT, text);

   const data = {
     model,
@@ -220,7 +220,7 @@ const genOpenAI = ({
       },
       {
         role: "user",
-        content: prompt,
+        content: userPrompt,
       },
     ],
     temperature,
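
For reference, a minimal sketch of the chat-completions body that the updated genOpenAI assembles once both prompts are substituted; the values below are illustrative stand-ins, not output captured from the extension:

    // Sketch only: shape of the OpenAI request after this change (illustrative values).
    const data = {
      model: "gpt-4",
      messages: [
        { role: "system", content: "You are a professional, authentic machine translation engine." },
        { role: "user", content: "Translate the following source text from English to Chinese. ..." },
      ],
      temperature: 0,
      max_tokens: 256, // assumes the standard chat-completions field; the exact wiring is outside this hunk
    };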
@@ -240,26 +240,30 @@ const genOpenAI = ({
   return [url, init];
 };

-const genGemini = ({ text, from, to, url, key, prompt, model }) => {
+const genGemini = ({ text, from, to, url, key, systemPrompt, userPrompt, model }) => {
   url = url
     .replaceAll(INPUT_PLACE_MODEL, model)
     .replaceAll(INPUT_PLACE_KEY, key);
-  prompt = prompt
+  systemPrompt = systemPrompt
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to)
+    .replaceAll(INPUT_PLACE_TEXT, text);
+  userPrompt = userPrompt
     .replaceAll(INPUT_PLACE_FROM, from)
     .replaceAll(INPUT_PLACE_TO, to)
     .replaceAll(INPUT_PLACE_TEXT, text);

   const data = {
-    contents: [
-      {
-        // role: "user",
-        parts: [
-          {
-            text: prompt,
-          },
-        ],
-      },
-    ],
+    system_instruction: {
+      parts: {
+        text: systemPrompt,
+      }
+    },
+    contents: {
+      parts: {
+        text: userPrompt,
+      }
+    }
   };

   const init = {
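
For context, the official generateContent examples wrap system_instruction.parts and contents in arrays; the documented request shape this hunk targets, sketched with illustrative values:

    // Sketch only: documented generateContent body (illustrative values).
    const body = {
      system_instruction: {
        parts: [{ text: "You are a professional, authentic machine translation engine." }],
      },
      contents: [
        { parts: [{ text: "Translate the following source text from English to Chinese. ..." }] },
      ],
    };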
@@ -280,19 +284,19 @@ const genClaude = ({
   url,
   key,
   systemPrompt,
-  prompt,
+  userPrompt,
   model,
   temperature,
   maxTokens,
 }) => {
-  prompt = prompt
-    .replaceAll(INPUT_PLACE_FROM, from)
-    .replaceAll(INPUT_PLACE_TO, to)
-    .replaceAll(INPUT_PLACE_TEXT, text);
   systemPrompt = systemPrompt
     .replaceAll(INPUT_PLACE_FROM, from)
     .replaceAll(INPUT_PLACE_TO, to)
     .replaceAll(INPUT_PLACE_TEXT, text);
+  userPrompt = userPrompt
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to)
+    .replaceAll(INPUT_PLACE_TEXT, text);

   const data = {
     model,
@@ -300,7 +304,7 @@ const genClaude = ({
     messages: [
       {
         role: "user",
-        content: prompt,
+        content: userPrompt,
       },
     ],
     temperature,
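
The Anthropic Messages API takes the system prompt as a top-level system field rather than as a message role, so only the user turn belongs in messages; a sketch of the resulting body with illustrative values:

    // Sketch only: Anthropic Messages API body (illustrative values; the system and
    // max_tokens wiring sits outside the lines shown in this hunk).
    const data = {
      model: "claude-3-haiku-20240307",
      system: "You are a professional, authentic machine translation engine.",
      messages: [
        { role: "user", content: "Translate the following source text from English to Chinese. ..." },
      ],
      temperature: 0,
      max_tokens: 1024,
    };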
@@ -320,16 +324,20 @@ const genClaude = ({
   return [url, init];
 };

-const genOllama = ({ text, from, to, url, key, system,prompt, model }) => {
-  prompt = prompt
+const genOllama = ({ text, from, to, url, key, systemPrompt, userPrompt, model }) => {
+  systemPrompt = systemPrompt
+    .replaceAll(INPUT_PLACE_FROM, from)
+    .replaceAll(INPUT_PLACE_TO, to)
+    .replaceAll(INPUT_PLACE_TEXT, text);
+  userPrompt = userPrompt
     .replaceAll(INPUT_PLACE_FROM, from)
     .replaceAll(INPUT_PLACE_TO, to)
     .replaceAll(INPUT_PLACE_TEXT, text);

   const data = {
     model,
-    system,
-    prompt,
+    system: systemPrompt,
+    prompt: userPrompt,
     stream: false,
   };
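
Ollama's /api/generate endpoint accepts system and prompt as top-level fields, so the rename maps directly onto the existing request format; a sketch of the resulting non-streaming call with illustrative values:

    // Sketch only: non-streaming /api/generate request (illustrative values).
    const res = await fetch("http://localhost:11434/api/generate", {
      method: "POST",
      body: JSON.stringify({
        model: "llama3.1",
        system: "You are a professional, authentic machine translation engine.",
        prompt: "Translate the following source text from English to Chinese. ...",
        stream: false, // a single JSON object comes back; the text is in its `response` field
      }),
    });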


@@ -527,8 +527,8 @@ const defaultOpenaiApi = {
   url: "https://api.openai.com/v1/chat/completions",
   key: "",
   model: "gpt-4",
-  prompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
-  systemPrompt: `You are a professional, authentic machine translation engine. You will be provided with a sentence in ${INPUT_PLACE_FROM}, and your task is to translate it into ${INPUT_PLACE_TO}.`,
+  systemPrompt: `You are a professional, authentic machine translation engine.`,
+  userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
   temperature: 0,
   maxTokens: 256,
   fetchLimit: 1,
@@ -538,8 +538,8 @@ const defaultOllamaApi = {
   url: "http://localhost:11434/api/generate",
   key: "",
   model: "llama3.1",
-  system:"You are a professional, authentic machine translation engine.",
-  prompt: `Translate the following text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO},output translation directly without any additional text:\n\n${INPUT_PLACE_TEXT}`,
+  systemPrompt: `You are a professional, authentic machine translation engine.`,
+  userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
   fetchLimit: 1,
   fetchInterval: 500,
 };
@@ -593,7 +593,8 @@ export const DEFAULT_TRANS_APIS = {
     url: `https://generativelanguage.googleapis.com/v1/models/${INPUT_PLACE_MODEL}:generateContent?key=${INPUT_PLACE_KEY}`,
     key: "",
     model: "gemini-pro",
-    prompt: `Translate the following text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}:\n\n${INPUT_PLACE_TEXT}`,
+    systemPrompt: `You are a professional, authentic machine translation engine.`,
+    userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
     fetchLimit: 1,
     fetchInterval: 500,
   },
@@ -601,8 +602,8 @@ export const DEFAULT_TRANS_APIS = {
     url: "https://api.anthropic.com/v1/messages",
     key: "",
     model: "claude-3-haiku-20240307",
-    prompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
     systemPrompt: `You are a professional, authentic machine translation engine.`,
+    userPrompt: `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}. Output translation directly without any additional text.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`,
     temperature: 0,
     maxTokens: 1024,
     fetchLimit: 1,
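
These defaults are template literals, so the INPUT_PLACE_* constants are baked into the stored strings and later swapped out per request by the gen* functions. A sketch of that two-step substitution, using hypothetical placeholder tokens (the real constant values are defined elsewhere in the repo):

    const INPUT_PLACE_FROM = "{{from}}"; // hypothetical value, for illustration only
    const INPUT_PLACE_TO = "{{to}}";     // hypothetical value, for illustration only
    const INPUT_PLACE_TEXT = "{{text}}"; // hypothetical value, for illustration only

    // Stored default: placeholders are embedded when the template literal is evaluated.
    const userPrompt = `Translate the following source text from ${INPUT_PLACE_FROM} to ${INPUT_PLACE_TO}.\n\nSource Text: ${INPUT_PLACE_TEXT}\n\nTranslated Text:`;

    // At request time the gen* functions substitute the actual job values.
    const resolved = userPrompt
      .replaceAll(INPUT_PLACE_FROM, "English")
      .replaceAll(INPUT_PLACE_TO, "Chinese")
      .replaceAll(INPUT_PLACE_TEXT, "Hello, world!");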


@@ -115,9 +115,8 @@ function ApiFields({ translator }) {
     url = "",
     key = "",
     model = "",
-    system = "",
-    prompt = "",
     systemPrompt = "",
+    userPrompt = "",
     fetchLimit = DEFAULT_FETCH_LIMIT,
     fetchInterval = DEFAULT_FETCH_INTERVAL,
     dictNo = "",
@@ -215,6 +214,7 @@ function ApiFields({ translator }) {
       )}
       {(translator.startsWith(OPT_TRANS_OPENAI) ||
+        translator.startsWith(OPT_TRANS_OLLAMA) ||
         translator === OPT_TRANS_CLAUDE ||
         translator === OPT_TRANS_GEMINI) && (
         <>
@@ -227,37 +227,18 @@ function ApiFields({ translator }) {
           />
           <TextField
             size="small"
-            label={"PROMPT"}
-            name="prompt"
-            value={prompt}
+            label={"SYSTEM PROMPT"}
+            name="systemPrompt"
+            value={systemPrompt}
             onChange={handleChange}
             multiline
             maxRows={10}
           />
-        </>
-      )}
-      {(translator.startsWith(OPT_TRANS_OLLAMA)) && (
-        <>
-          <TextField
-            size="small"
-            label={"MODEL"}
-            name="model"
-            value={model}
-            onChange={handleChange}
-          />
-          <TextField
-            size="small"
-            label={"SYSTEM MESSAGE"}
-            name="system"
-            value={system}
-            onChange={handleChange}
-          />
           <TextField
             size="small"
-            label={"PROMPT"}
-            name="prompt"
-            value={prompt}
+            label={"USER PROMPT"}
+            name="userPrompt"
+            value={userPrompt}
             onChange={handleChange}
             multiline
             maxRows={10}
@@ -268,13 +249,6 @@ function ApiFields({ translator }) {
       {(translator.startsWith(OPT_TRANS_OPENAI) ||
         translator === OPT_TRANS_CLAUDE) && (
         <>
-          <TextField
-            size="small"
-            label={"SYSTEM PROMPT"}
-            name="systemPrompt"
-            value={systemPrompt}
-            onChange={handleChange}
-          />
           <TextField
             size="small"
             label={"Temperature"}