AI Error handling; Fix logging
parent 8f01db9643
commit 28f28eb0ce
4 changed files with 66 additions and 43 deletions
package-lock.json (generated, 6 changes)

@@ -1,12 +1,12 @@
 {
   "name": "aslobot-matrix",
-  "version": "0.5.0",
+  "version": "1.2.3",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "aslobot-matrix",
-      "version": "0.5.0",
+      "version": "1.2.3",
       "license": "ISC",
       "dependencies": {
         "@google/genai": "^1.34.0",
@@ -165,7 +165,6 @@
       "integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "undici-types": "~7.16.0"
       }
@@ -1077,7 +1076,6 @@
       "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
       "dev": true,
       "license": "Apache-2.0",
-      "peer": true,
       "bin": {
         "tsc": "bin/tsc",
         "tsserver": "bin/tsserver"
package.json

@@ -1,6 +1,6 @@
 {
   "name": "aslobot-matrix",
-  "version": "1.2.2",
+  "version": "1.2.3",
   "description": "",
   "license": "ISC",
   "author": "",
helpers module

@@ -129,7 +129,7 @@ const changePersonality = (
 const log = (logMessage: string) => {
   appendFileSync(
     config.logPath,
-    `[${new Date().toLocaleString()}] ${logMessage}`,
+    `[${new Date().toLocaleString()}] ${logMessage}\n`,
   );
 };
 
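The logging fix is just the trailing `\n`: appendFileSync writes exactly the bytes it is given and adds no line terminator, so successive entries previously ran together on one line. A minimal sketch of the resulting helper, with a placeholder path standing in for config.logPath:

import { appendFileSync } from "node:fs";

// Placeholder path for the sketch; the real value comes from config.logPath.
const logPath = "./aslobot.log";

const log = (logMessage: string) => {
  // The trailing "\n" terminates each entry; without it, every call
  // appends onto the same line of the log file.
  appendFileSync(logPath, `[${new Date().toLocaleString()}] ${logMessage}\n`);
};

log("bot started");
log("personality changed");
// The log file now holds two lines instead of one run-on line.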
Gemini client module

@@ -11,6 +11,7 @@ import { toolFunctions, tools } from "./tools.js";
 import type { FunctionResponse } from "@google/genai";
 import type { Content } from "@google/genai";
 import { log } from "../../helpers.js";
+import type { GenerateContentResponse } from "@google/genai";
 
 const googleAI = new GoogleGenAI({
   apiKey: config.app.ai.api.key,
@@ -61,19 +62,27 @@ const getTextGemini = async (
     ? [oldInputContent, inputContent]
     : [inputContent];
 
-  const response = await googleAI.models.generateContent({
-    model: "gemini-3-flash-preview",
-    contents: contents,
-    config: {
-      systemInstruction: JSON.stringify(instructions),
-      toolConfig: {
-        functionCallingConfig: {
-          mode: FunctionCallingConfigMode.AUTO,
+  let response: GenerateContentResponse;
+  try {
+    response = await googleAI.models.generateContent({
+      model: "gemini-3-flash-preview",
+      contents: contents,
+      config: {
+        systemInstruction: JSON.stringify(instructions),
+        toolConfig: {
+          functionCallingConfig: {
+            mode: FunctionCallingConfigMode.AUTO,
+          },
         },
+        tools: [{ functionDeclarations: tools }],
       },
-      tools: [{ functionDeclarations: tools }],
-    },
-  });
+    });
+  } catch (e: unknown) {
+    return {
+      text: "AI Error",
+      tokens: 0,
+    };
+  }
 
   let text = response.text ?? "AI Error";
   let token = response.usageMetadata?.totalTokenCount ?? 0;
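All three generateContent call sites in this file get the same treatment: the response is declared up front with the newly imported GenerateContentResponse type (a type-only import, erased at compile time), assigned inside try, and any thrown error short-circuits into a fixed fallback object, so a failed API call degrades to an "AI Error" reply instead of an unhandled rejection. A stripped-down sketch of that pattern follows; the wrapper name, the key source, and the AITextResult alias are illustrative, while the model name and the { text, tokens } fallback come from the diff:

import { GoogleGenAI } from "@google/genai";
import type { GenerateContentResponse } from "@google/genai";

// Illustrative return shape, mirroring the { text, tokens } objects in the diff.
type AITextResult = { text: string; tokens: number };

// Assumed key source for the sketch; the repo reads config.app.ai.api.key.
const googleAI = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });

const generateTextWithFallback = async (prompt: string): Promise<AITextResult> => {
  let response: GenerateContentResponse;
  try {
    response = await googleAI.models.generateContent({
      model: "gemini-3-flash-preview", // model name taken from the diff
      contents: prompt,
    });
  } catch (e: unknown) {
    // Network or SDK failures degrade to a canned reply instead of throwing.
    return { text: "AI Error", tokens: 0 };
  }
  return {
    text: response.text ?? "AI Error",
    tokens: response.usageMetadata?.totalTokenCount ?? 0,
  };
};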
@@ -111,30 +120,38 @@ const getTextGemini = async (
       },
     };
 
-    const responseTool = await googleAI.models.generateContent({
-      model: "gemini-3-flash-preview",
-      contents: [
-        ...contents,
-        content,
-        {
-          role: "tool",
-          parts: [
-            {
-              functionResponse: functionResponse,
-            },
-          ],
-        },
-      ],
-      config: {
-        systemInstruction: JSON.stringify(instructions),
-        toolConfig: {
-          functionCallingConfig: {
-            mode: FunctionCallingConfigMode.AUTO,
+    let responseTool: GenerateContentResponse;
+    try {
+      responseTool = await googleAI.models.generateContent({
+        model: "gemini-3-flash-preview",
+        contents: [
+          ...contents,
+          content,
+          {
+            role: "tool",
+            parts: [
+              {
+                functionResponse: functionResponse,
+              },
+            ],
           },
+        ],
+        config: {
+          systemInstruction: JSON.stringify(instructions),
+          toolConfig: {
+            functionCallingConfig: {
+              mode: FunctionCallingConfigMode.AUTO,
+            },
+          },
+          tools: [{ functionDeclarations: tools }],
         },
-        tools: [{ functionDeclarations: tools }],
-      },
-    });
+      });
+    } catch (e: unknown) {
+      return {
+        text: "AI Error",
+        tokens: 0,
+      };
+    }
 
     return {
       text: responseTool.text ?? "AI Error",
@@ -143,10 +160,18 @@ const getTextGemini = async (
 };
 
 const getImageGemini = async (input: string): Promise<AIResponseImage> => {
-  const response = await googleAI.models.generateContent({
-    model: "gemini-2.5-flash-image",
-    contents: input,
-  });
+  let response: GenerateContentResponse;
+  try {
+    response = await googleAI.models.generateContent({
+      model: "gemini-2.5-flash-image",
+      contents: input,
+    });
+  } catch (e: unknown) {
+    return {
+      image: undefined,
+      tokens: 0,
+    };
+  }
 
   const firstCandidate = (response.candidates ?? [])[0];
   const parts = firstCandidate?.content?.parts ?? [];
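The image path returns { image: undefined, tokens: 0 } on failure, and on success walks candidates, content, and parts with optional chaining, as the last two context lines show. A short sketch of how the first inline image could then be pulled out of such a response; the AIResponseImage shape and the base64-string assumption are illustrative, and only the candidates/parts walk is taken from the diff:

import type { GenerateContentResponse } from "@google/genai";

// Assumed result shape; the diff only shows { image, tokens } with image possibly undefined.
type AIResponseImage = { image: string | undefined; tokens: number };

const extractFirstImage = (response: GenerateContentResponse): AIResponseImage => {
  const firstCandidate = (response.candidates ?? [])[0];
  const parts = firstCandidate?.content?.parts ?? [];
  // Image output arrives as an inlineData part carrying base64-encoded bytes.
  const imagePart = parts.find((part) => part.inlineData?.data !== undefined);
  return {
    image: imagePart?.inlineData?.data,
    tokens: response.usageMetadata?.totalTokenCount ?? 0,
  };
};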