diff --git a/.env.example b/.env.example index dd705bb..9da9f23 100644 --- a/.env.example +++ b/.env.example @@ -1,12 +1,17 @@ -OpenAI_API_KEY='sk-...' -OpenAI_API_ENDPOINT='https://api.openai.com/v1/chat/completions' +OpenAI_API_ENDPOINT='https://api.chatnio.net/v1/chat/completions' +OpenAI_API_KEY='sk-' OpenAI_MODEL='gpt-3.5-turbo' -Gemini_API_KEY='' Gemini_API_ENDPOINT='https://generativelanguage.googleapis.com' +Gemini_API_KEY='' + +NIUTRANS_KEY='' + +DEEPL_X_API_URL='' -NIUTRANS_KEY='...' -DEEPL_X_API_URL='...' +BAIDU_APP_ID='' +BAIDU_KEY='' -BAIDU_APP_ID='...' -BAIDU_KEY='...' \ No newline at end of file +QWEN_API_ENDPOINT='https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation' +QWEN_API_KEY='sk-' +QWEN_MODEL='qwen-turbo' \ No newline at end of file diff --git a/setup.sh b/setup.sh index 0db8c0b..f4a426f 100644 --- a/setup.sh +++ b/setup.sh @@ -77,9 +77,13 @@ if docker ps -a -q --filter "name=lyrify" | grep -q .; then read -p "Gemini API Endpoint (Press Enter to use default): " gemini_api_endpoint read -p "Gemini API Key: " gemini_api_key read -p "NIUTRANS Key: " niutrans_key - read -p "DEEPL_X API URL: " deepl_x_api_url - read -p "BAIDU_APP_ID: " baidu_app_id - read -p "BAIDU_KEY: " baidu_key + read -p "DEEPL X API URL: " deepl_x_api_url + read -p "BAIDU APP ID: " baidu_app_id + read -p "BAIDU KEY: " baidu_key + read -p "Qwen API Endpoint (Press Enter to use default): " qwen_api_endpoint + read -p "Qwen API Key: " qwen_api_key + read -p "Qwen Model: " qwen_model + cat <<EOL >/tmp/.Lyrify/config.txt openai_api_endpoint=${openai_api_endpoint:-https://api.openai.com/v1/chat/completions} @@ -91,6 +95,9 @@ niutrans_key=$niutrans_key deepl_x_api_url=$deepl_x_api_url baidu_app_id=$baidu_app_id baidu_key=$baidu_key +qwen_api_endpoint=${qwen_api_endpoint:-https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation} +qwen_api_key=$qwen_api_key +qwen_model=$qwen_model EOL fi @@ -110,6 +117,9 @@ EOL -e 
DEEPL_X_API_URL="$deepl_x_api_url" \ -e BAIDU_APP_ID="$baidu_app_id" \ -e BAIDU_KEY="$baidu_key" \ + -e QWEN_API_ENDPOINT="$qwen_api_endpoint" \ + -e QWEN_API_KEY="$qwen_api_key" \ + -e QWEN_MODEL="$qwen_model" \ sipcink/lyrify:online if [ $? -ne 0 ]; then @@ -141,9 +151,13 @@ else read -p "Gemini API Endpoint (Press Enter to use default): " gemini_api_endpoint read -p "Gemini API Key: " gemini_api_key read -p "NIUTRANS Key: " niutrans_key - read -p "DEEPL_X API URL: " deepl_x_api_url - read -p "BAIDU_APP_ID: " baidu_app_id - read -p "BAIDU_KEY: " baidu_key + read -p "DEEPL X API URL: " deepl_x_api_url + read -p "BAIDU APP ID: " baidu_app_id + read -p "BAIDU KEY: " baidu_key + read -p "Qwen API Endpoint (Press Enter to use default): " qwen_api_endpoint + read -p "Qwen API Key: " qwen_api_key + read -p "Qwen Model: " qwen_model + cat <<EOL >config.txt openai_api_endpoint=${openai_api_endpoint:-https://api.openai.com/v1/chat/completions} @@ -155,6 +169,9 @@ niutrans_key=$niutrans_key deepl_x_api_url=$deepl_x_api_url baidu_app_id=$baidu_app_id baidu_key=$baidu_key +qwen_api_endpoint=${qwen_api_endpoint:-https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation} +qwen_api_key=$qwen_api_key +qwen_model=$qwen_model EOL docker run -d --name lyrify \ @@ -168,6 +185,9 @@ EOL -e DEEPL_X_API_URL="$deepl_x_api_url" \ -e BAIDU_APP_ID="$baidu_app_id" \ -e BAIDU_KEY="$baidu_key" \ + -e QWEN_API_ENDPOINT="$qwen_api_endpoint" \ + -e QWEN_API_KEY="$qwen_api_key" \ + -e QWEN_MODEL="$qwen_model" \ sipcink/lyrify:online if [ $? -ne "0" ]; then diff --git a/src/components/ResultBox.tsx b/src/components/ResultBox.tsx index 6411b91..7ccf93b 100644 --- a/src/components/ResultBox.tsx +++ b/src/components/ResultBox.tsx @@ -106,6 +106,7 @@ export function ResultContainer({ loading, result, isExpanded}: ResultContainerP
+ @@ -115,4 +116,4 @@ export function ResultContainer({ loading, result, isExpanded}: ResultContainerP
); } -export function getResult() {return ['chatgpt', 'gemini', 'deeplx', 'microsoft', 'google', 'transmart', 'niutrans', 'baidu']} \ No newline at end of file +export function getResult() {return ['chatgpt', 'gemini', 'qwen', 'deeplx', 'microsoft', 'google', 'transmart', 'niutrans', 'baidu']} \ No newline at end of file diff --git a/src/lib/api.ts b/src/lib/api.ts index 86e2022..c05d0d0 100644 --- a/src/lib/api.ts +++ b/src/lib/api.ts @@ -3,6 +3,7 @@ import axios from "axios"; export type TranslateResult = { chatgpt: string; gemini: string; + qwen: string; deeplx: string; microsoft: string; google: string; @@ -15,6 +16,7 @@ export type Translateloader = { translate: boolean, chatgpt: boolean; gemini: boolean; + qwen: boolean; deeplx: boolean; microsoft: boolean; google: boolean; @@ -33,6 +35,7 @@ export type TranslateResponse = { export const initializeTranslateState: TranslateResult = { chatgpt: "", gemini: "", + qwen: "", deeplx: "", microsoft: "", google: "", @@ -45,6 +48,7 @@ export const initializeTranslateloader: Translateloader = { translate: false, chatgpt: false, gemini: false, + qwen: false, deeplx: false, microsoft: false, google: false, diff --git a/src/pages/api/lib/qwen.ts b/src/pages/api/lib/qwen.ts new file mode 100644 index 0000000..b08ab7b --- /dev/null +++ b/src/pages/api/lib/qwen.ts @@ -0,0 +1,70 @@ +// code from sipc + +import axios from "axios"; +import { getErrorMessage } from "@/pages/api/lib/utils"; + +export class Qwen { + public apiUrl: string; + public key: string; + public model: string; + + constructor( + apiUrl = "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation", + key: string, + model = "qwen-turbo", + ) { + this.apiUrl = apiUrl; + this.key = key; + this.model = model; + } + + async translate(text: string, target: string, source: string = "auto") { + if (target === "classical-chinese") { + target = "文言文"; + if (source === "zh") { + source = "白话文"; + } + } + if (source === "classical-chinese") { + 
source = "文言文"; + if (target === "zh") { + target = "白话文"; + } + } + try { + const headers = { + "Content-Type": "application/json", + Authorization: `Bearer ${this.key}`, + }; + const data = JSON.stringify({ + model: this.model, + input: { + messages: [ + { + role: "system", + content: `You are a professional, authentic translation engine, only returns translations.`, + }, + { + role: "user", + content: `Please translate the text from ${source} to ${target} language, without explaining my original text.`, + }, + { + role: "user", + content: text, + }, + ], + }, + }); + const response = await axios.post(this.apiUrl, data, { headers }); + return response.data.output.text; + } catch (error:any) { + throw new Error(`Error while translating: ${getErrorMessage(error?.response?.data ?? error)}`); + } + } +} + +export const QwenInstance = new Qwen( + process.env.QWEN_API_ENDPOINT!, + process.env.QWEN_API_KEY!, + process.env.QWEN_MODEL!, +); diff --git a/src/pages/api/translate.ts b/src/pages/api/translate.ts index 0dca10a..2f8b37e 100644 --- a/src/pages/api/translate.ts +++ b/src/pages/api/translate.ts @@ -8,6 +8,7 @@ import { GeminiInstance } from "./lib/gemini"; import { TransmartInstance } from "./lib/transmart"; import { NiutransInstance } from "./lib/niutrans"; import { BaiduInstance } from "./lib/baidu"; +import { QwenInstance } from "./lib/qwen"; import { autodetect } from "./lib/autodetect"; type TranslateResponse = { @@ -54,6 +55,12 @@ export default async function handler( targetLanguage, sourceLanguage, ).catch((e) => e.message); + case "qwen": + return await QwenInstance.translate( + text, + targetLanguage, + sourceLanguage, + ).catch((e) => e.message); case "baidu": return await BaiduInstance.translate( text, diff --git a/src/styles/globals.css b/src/styles/globals.css index fa6704d..279c667 100644 --- a/src/styles/globals.css +++ b/src/styles/globals.css @@ -197,9 +197,4 @@ main { .result-container p { text-align: justify; -} - -@media (max-width: 680px) { - 
.result-container { - } -} +} \ No newline at end of file