diff --git a/package-lock.json b/package-lock.json
index d2772f5..2d78344 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "aslobot-matrix",
-  "version": "0.5.0",
+  "version": "1.2.3",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "aslobot-matrix",
-      "version": "0.5.0",
+      "version": "1.2.3",
       "license": "ISC",
       "dependencies": {
         "@google/genai": "^1.34.0",
@@ -165,7 +165,6 @@
       "integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "undici-types": "~7.16.0"
       }
@@ -1077,7 +1076,6 @@
       "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
      "dev": true,
      "license": "Apache-2.0",
-      "peer": true,
       "bin": {
         "tsc": "bin/tsc",
         "tsserver": "bin/tsserver"
diff --git a/package.json b/package.json
index 477c418..7ad3718 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "aslobot-matrix",
-  "version": "1.2.2",
+  "version": "1.2.3",
   "description": "",
   "license": "ISC",
   "author": "",
diff --git a/src/helpers.ts b/src/helpers.ts
index 28ce310..2af3a36 100644
--- a/src/helpers.ts
+++ b/src/helpers.ts
@@ -129,7 +129,7 @@ const changePersonality = (
 const log = (logMessage: string) => {
   appendFileSync(
     config.logPath,
-    `[${new Date().toLocaleString()}] ${logMessage}`,
+    `[${new Date().toLocaleString()}] ${logMessage}\n`,
   );
 };
 
diff --git a/src/services/ai/ai.ts b/src/services/ai/ai.ts
index ba2537e..b3c54ee 100644
--- a/src/services/ai/ai.ts
+++ b/src/services/ai/ai.ts
@@ -11,6 +11,7 @@ import { toolFunctions, tools } from "./tools.js";
 import type { FunctionResponse } from "@google/genai";
 import type { Content } from "@google/genai";
 import { log } from "../../helpers.js";
+import type { GenerateContentResponse } from "@google/genai";
 
 const googleAI = new GoogleGenAI({
   apiKey: config.app.ai.api.key,
@@ -61,19 +62,27 @@ const getTextGemini = async (
     ? [oldInputContent, inputContent]
     : [inputContent];
 
-  const response = await googleAI.models.generateContent({
-    model: "gemini-3-flash-preview",
-    contents: contents,
-    config: {
-      systemInstruction: JSON.stringify(instructions),
-      toolConfig: {
-        functionCallingConfig: {
-          mode: FunctionCallingConfigMode.AUTO,
+  let response: GenerateContentResponse;
+  try {
+    response = await googleAI.models.generateContent({
+      model: "gemini-3-flash-preview",
+      contents: contents,
+      config: {
+        systemInstruction: JSON.stringify(instructions),
+        toolConfig: {
+          functionCallingConfig: {
+            mode: FunctionCallingConfigMode.AUTO,
+          },
         },
+        tools: [{ functionDeclarations: tools }],
       },
-      tools: [{ functionDeclarations: tools }],
-    },
-  });
+    });
+  } catch (e: unknown) {
+    return {
+      text: "AI Error",
+      tokens: 0,
+    };
+  }
 
   let text = response.text ?? "AI Error";
   let token = response.usageMetadata?.totalTokenCount ?? 0;
@@ -111,30 +120,38 @@ const getTextGemini = async (
     },
   };
 
-  const responseTool = await googleAI.models.generateContent({
-    model: "gemini-3-flash-preview",
-    contents: [
-      ...contents,
-      content,
-      {
-        role: "tool",
-        parts: [
-          {
-            functionResponse: functionResponse,
-          },
-        ],
-      },
-    ],
-    config: {
-      systemInstruction: JSON.stringify(instructions),
-      toolConfig: {
-        functionCallingConfig: {
-          mode: FunctionCallingConfigMode.AUTO,
+  let responseTool: GenerateContentResponse;
+  try {
+    responseTool = await googleAI.models.generateContent({
+      model: "gemini-3-flash-preview",
+      contents: [
+        ...contents,
+        content,
+        {
+          role: "tool",
+          parts: [
+            {
+              functionResponse: functionResponse,
+            },
+          ],
         },
+      ],
+      config: {
+        systemInstruction: JSON.stringify(instructions),
+        toolConfig: {
+          functionCallingConfig: {
+            mode: FunctionCallingConfigMode.AUTO,
+          },
+        },
+        tools: [{ functionDeclarations: tools }],
       },
-      tools: [{ functionDeclarations: tools }],
-    },
-  });
+    });
+  } catch (e: unknown) {
+    return {
+      text: "AI Error",
+      tokens: 0,
+    };
+  }
 
   return {
     text: responseTool.text ?? "AI Error",
@@ -143,10 +160,18 @@ const getTextGemini = async (
 };
 
 const getImageGemini = async (input: string): Promise => {
-  const response = await googleAI.models.generateContent({
-    model: "gemini-2.5-flash-image",
-    contents: input,
-  });
+  let response: GenerateContentResponse;
+  try {
+    response = await googleAI.models.generateContent({
+      model: "gemini-2.5-flash-image",
+      contents: input,
+    });
+  } catch (e: unknown) {
+    return {
+      image: undefined,
+      tokens: 0,
+    };
+  }
 
   const firstCandidate = (response.candidates ?? [])[0];
   const parts = firstCandidate?.content?.parts ?? [];
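
A minimal sketch of the error-handling pattern these hunks introduce: each googleAI.models.generateContent call is wrapped in try/catch and resolves to a placeholder result instead of throwing. The callGemini helper and TextResult type below are hypothetical names for illustration; GoogleGenAI, GenerateContentResponse, the "AI Error" fallback, and the { text, tokens } shape come from the diff, while reading the API key from an environment variable is an assumption (ai.ts uses config.app.ai.api.key).

// sketch.ts - illustration only, not part of the commit
import { GoogleGenAI } from "@google/genai";
import type { GenerateContentResponse } from "@google/genai";

const googleAI = new GoogleGenAI({
  // Assumption: key taken from the environment for this standalone sketch.
  apiKey: process.env.GEMINI_API_KEY ?? "",
});

type TextResult = { text: string; tokens: number };

// Mirrors the fallback used in getTextGemini/getImageGemini: a failed request
// returns a placeholder instead of propagating the error to the caller.
const callGemini = async (prompt: string): Promise<TextResult> => {
  let response: GenerateContentResponse;
  try {
    response = await googleAI.models.generateContent({
      model: "gemini-3-flash-preview",
      contents: prompt,
    });
  } catch (e: unknown) {
    return { text: "AI Error", tokens: 0 };
  }
  return {
    text: response.text ?? "AI Error",
    tokens: response.usageMetadata?.totalTokenCount ?? 0,
  };
};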