diff --git a/packages/backend/src/models/Meta.ts b/packages/backend/src/models/Meta.ts
index 63e2331915..9f1da8b0fd 100644
--- a/packages/backend/src/models/Meta.ts
+++ b/packages/backend/src/models/Meta.ts
@@ -665,12 +665,12 @@ export class MiMeta {
 	})
 	public googleAnalyticsMeasurementId: string | null;
 
-	@Column('boolean',{
+	@Column('boolean', {
 		default: false,
 	})
 	public enableLlmTranslator: boolean;
 
-	@Column('boolean',{
+	@Column('boolean', {
 		default: false,
 	})
 	public enableLlmTranslatorRedisCache: boolean;
diff --git a/packages/backend/src/server/api/endpoints/admin/update-meta.ts b/packages/backend/src/server/api/endpoints/admin/update-meta.ts
index 0327ee22a4..521814179f 100644
--- a/packages/backend/src/server/api/endpoints/admin/update-meta.ts
+++ b/packages/backend/src/server/api/endpoints/admin/update-meta.ts
@@ -187,15 +187,15 @@ export const paramDef = {
 		},
 		enableLlmTranslator: { type: 'boolean' },
 		enableLlmTranslatorRedisCache: { type: 'boolean' },
-		llmTranslatorRedisCacheTtl: { type: 'integer' , nullable: true},
-		llmTranslatorBaseUrl: { type: 'string' , nullable: true},
-		llmTranslatorApiKey: { type: 'string' , nullable: true},
-		llmTranslatorModel: { type: 'string' , nullable: true},
-		llmTranslatorTemperature: { type: 'number' , nullable: true},
-		llmTranslatorTopP: { type: 'number' , nullable: true},
-		llmTranslatorMaxTokens: { type: 'integer' , nullable: true},
-		llmTranslatorSysPrompt: { type: 'string' , nullable: true},
-		llmTranslatorUserPrompt: { type: 'string' , nullable: true},
+		llmTranslatorRedisCacheTtl: { type: 'integer' },
+		llmTranslatorBaseUrl: { type: 'string', nullable: true },
+		llmTranslatorApiKey: { type: 'string', nullable: true },
+		llmTranslatorModel: { type: 'string', nullable: true },
+		llmTranslatorTemperature: { type: 'number', nullable: true },
+		llmTranslatorTopP: { type: 'number', nullable: true },
+		llmTranslatorMaxTokens: { type: 'integer', nullable: true },
+		llmTranslatorSysPrompt: { type: 'string', nullable: true },
+		llmTranslatorUserPrompt: { type: 'string', nullable: true },
 	},
 	required: [],
 } as const;
diff --git a/packages/backend/src/server/api/endpoints/notes/translate.ts b/packages/backend/src/server/api/endpoints/notes/translate.ts
index 0fb32d3571..e85d063e0e 100644
--- a/packages/backend/src/server/api/endpoints/notes/translate.ts
+++ b/packages/backend/src/server/api/endpoints/notes/translate.ts
@@ -5,16 +5,16 @@
 
 import { URLSearchParams } from 'node:url';
 import { Inject, Injectable } from '@nestjs/common';
+import { OpenAI } from 'openai';
+import * as Redis from 'ioredis';
 import { Endpoint } from '@/server/api/endpoint-base.js';
 import { NoteEntityService } from '@/core/entities/NoteEntityService.js';
 import { HttpRequestService } from '@/core/HttpRequestService.js';
 import { GetterService } from '@/server/api/GetterService.js';
 import { RoleService } from '@/core/RoleService.js';
-import { ApiError } from '../../error.js';
 import { MiMeta } from '@/models/_.js';
 import { DI } from '@/di-symbols.js';
-import { OpenAI } from "openai";
-import * as Redis from 'ioredis';
+import { ApiError } from '../../error.js';
 
 export const meta = {
 	tags: ['notes'],
@@ -96,7 +96,7 @@ export default class extends Endpoint<typeof meta, typeof paramDef> { // eslint-
 				const res = await this.llmTranslate(note.text, ps.targetLang, note.id);
 				return {
 					text: res,
-				}
+				};
 			}
 
 			if (this.serverSettings.deeplAuthKey == null) {
@@ -136,26 +136,24 @@ export default class extends Endpoint<typeof meta, typeof paramDef> { // eslint-
 		});
 	}
 
-	private async llmTranslate(text: string, targetLang: string, noteId: string): Promise<string | null> {
+	private async llmTranslate(text: string, targetLang: string, noteId: string): Promise<string> {
 		if (this.serverSettings.enableLlmTranslatorRedisCache) {
 			const key = `llmTranslate:${targetLang}:${noteId}`;
 			const cached = await this.redisClient.get(key);
 			if (cached != null) {
-				this.redisClient.expire(key, this.serverSettings.llmTranslatorRedisCacheTtl*60);
+				this.redisClient.expire(key, this.serverSettings.llmTranslatorRedisCacheTtl * 60);
 				return cached;
 			}
 			const res = await this.getLlmRes(text, targetLang);
-			await this.redisClient.set(key, res ?? '');
-			this.redisClient.expire(key, this.serverSettings.llmTranslatorRedisCacheTtl*60);
+			await this.redisClient.set(key, res);
+			this.redisClient.expire(key, this.serverSettings.llmTranslatorRedisCacheTtl * 60);
 			return res;
-		}
-		else {
+		} else {
 			return this.getLlmRes(text, targetLang);
 		}
 	}
 
-
-	private async getLlmRes(text: string, targetLang: string): Promise<string | null> {
+	private async getLlmRes(text: string, targetLang: string): Promise<string> {
 		const client = new OpenAI({
 			baseURL: this.serverSettings.llmTranslatorBaseUrl,
 			apiKey: this.serverSettings.llmTranslatorApiKey ?? '',
@@ -173,8 +171,8 @@ export default class extends Endpoint<typeof meta, typeof paramDef> { // eslint-
 			temperature: this.serverSettings.llmTranslatorTemperature,
 			max_tokens: this.serverSettings.llmTranslatorMaxTokens,
 			top_p: this.serverSettings.llmTranslatorTopP,
-		})
+		});
 
-		return completion.choices[0].message.content
+		return completion.choices[0].message.content ?? '';
 	}
 }
diff --git a/packages/frontend/src/pages/admin/external-services.vue b/packages/frontend/src/pages/admin/external-services.vue
index 9c133641d3..1e84523a92 100644
--- a/packages/frontend/src/pages/admin/external-services.vue
+++ b/packages/frontend/src/pages/admin/external-services.vue
@@ -125,7 +125,6 @@ const llmTranslatorUserPrompt = ref('');
 const llmTranslatorRedisCacheEnabled = ref(false);
 const llmTranslatorRedisCacheTtl = ref(0);
 
-
 async function init() {
	const meta = await misskeyApi('admin/meta');
	deeplAuthKey.value = meta.deeplAuthKey ?? '';
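
For reviewers, a minimal sketch of the read-through cache that the `llmTranslate` hunk above implements, assuming ioredis: the key is `llmTranslate:${targetLang}:${noteId}`, a cache hit refreshes the TTL, and a miss stores the fresh translation with the same TTL, with `llmTranslatorRedisCacheTtl` interpreted as minutes. The `redis`, `translate`, and `ttlMinutes` names below are illustrative and not part of the diff.

```ts
// Minimal sketch of the read-through cache used by llmTranslate (assumes ioredis).
// Only the key shape and the get/expire/set sequence mirror the diff; the surrounding
// names (redis, translate, ttlMinutes) are illustrative.
import Redis from 'ioredis';

const redis = new Redis();

async function cachedTranslate(
	noteId: string,
	targetLang: string,
	text: string,
	ttlMinutes: number,
	translate: (text: string, targetLang: string) => Promise<string>,
): Promise<string> {
	const key = `llmTranslate:${targetLang}:${noteId}`;

	const cached = await redis.get(key);
	if (cached != null) {
		// Cache hit: sliding expiration, refresh the TTL (configured in minutes).
		await redis.expire(key, ttlMinutes * 60);
		return cached;
	}

	// Cache miss: translate once, then store the result with the same TTL.
	const res = await translate(text, targetLang);
	await redis.set(key, res);
	await redis.expire(key, ttlMinutes * 60);
	return res;
}
```

If the separate `set`/`expire` pair is a concern, ioredis also accepts `redis.set(key, res, 'EX', ttlMinutes * 60)`, which writes the value and its expiry in a single command.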