Add smart models
This commit is contained in:
parent 7b364bce7d
commit b8cd35540f
4 changed files with 38 additions and 7 deletions
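In short: text commands prefixed with !aipro and image commands prefixed with !imgpro now set a useSmartModel flag. getTextGemini and getImageGemini take a new smartModel parameter that picks gemini-3-pro-preview over gemini-3-flash-preview for text and gemini-3-pro-image-preview over gemini-2.5-flash-image for images. The generateImage tool declaration also gains a required smart boolean; when it is true, the tool handler re-dispatches the prompt as an !imgpro command.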
@@ -46,6 +46,8 @@ const onAI = async (
   let personality = config.app.ai.personalities[state.personality.index];
 
+  const useSmartModel = text.startsWith("!aipro");
+
   let textMod = text.replace("!ai", "").trim();
   let instructions = {
     prefferedLanguages: ["english", "slovak"],
@@ -72,6 +74,7 @@ const onAI = async (
       roomId: roomId,
       sender: sender,
     },
+    useSmartModel,
     instructions,
     `${username}: ${textMod}`,
     `${repliedUsername}: ${repliedMessage}`,
@@ -100,6 +103,8 @@ const onAI = async (
 const onImageGen = async (text: string, roomId: string, sender: string) => {
   const user = getUserById(sender);
 
+  const useSmartModel = text.startsWith("!imgpro");
+
   let textMod = text.replace("!img", "").trim().toLowerCase();
   alts.forEach((alt) => {
     alt.keys.forEach((key) => {
@@ -107,7 +112,7 @@ const onImageGen = async (text: string, roomId: string, sender: string) => {
     });
   });
 
-  const responseAI = await getImageGemini(textMod);
+  const responseAI = await getImageGemini(textMod, useSmartModel);
 
   user.aiCost += responseAI.tokens * prices.image;
   if (!responseAI.image || responseAI.image.length < 10) {
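The handler hunks above derive the flag from the command prefix and forward it to the client. A minimal self-contained sketch of that pattern in TypeScript; getImageGemini is stubbed here because only its new (input, smartModel) signature appears in this commit, and the stubbed response shape is assumed from how responseAI is used above:

// Sketch only: the real onImageGen also resolves the user, applies alt
// replacements, and tracks cost per token.
declare const getImageGemini: (
  input: string,
  smartModel: boolean,
) => Promise<{ image: string; tokens: number }>; // shape assumed from usage

const handleImageCommand = async (text: string) => {
  // "!imgpro ..." opts into the smarter model; plain "!img ..." does not.
  const useSmartModel = text.startsWith("!imgpro");
  const prompt = text.replace("!img", "").trim().toLowerCase();
  return getImageGemini(prompt, useSmartModel);
};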
@@ -3,7 +3,6 @@ import type { ICallbackStore } from "../types.js";
 import { config } from "../../config.js";
 import {
   existsEntity,
   getEntitiesAtLocation,
   getEntityByName,
   getHealthPercentage,
   getMaxHealth,
@@ -19,6 +19,7 @@ const googleAI = new GoogleGenAI({
 
 const getTextGemini = async (
   matrixData: AIToolMatrixData,
+  smartModel: boolean,
   instructions: IAIInstructions,
   input: string,
   oldInput?: string,
@@ -82,7 +83,9 @@ const getTextGemini = async (
   let response: GenerateContentResponse;
   try {
     response = await googleAI.models.generateContent({
-      model: "gemini-3-flash-preview",
+      model: smartModel
+        ? "gemini-3-pro-preview"
+        : "gemini-3-flash-preview",
       contents: contents,
       config: {
         systemInstruction: JSON.stringify(instructions),
@@ -140,7 +143,9 @@ const getTextGemini = async (
   let responseTool: GenerateContentResponse;
   try {
     responseTool = await googleAI.models.generateContent({
-      model: "gemini-3-flash-preview",
+      model: smartModel
+        ? "gemini-3-pro-preview"
+        : "gemini-3-flash-preview",
       contents: [
         ...contents,
         content,
@@ -176,11 +181,18 @@ const getTextGemini = async (
   };
 };
 
-const getImageGemini = async (input: string): Promise<AIResponseImage> => {
+const getImageGemini = async (
+  input: string,
+  smartModel: boolean,
+): Promise<AIResponseImage> => {
   log(`AI Image Request: ${input}`);
 
   let response: GenerateContentResponse;
   try {
     response = await googleAI.models.generateContent({
-      model: "gemini-2.5-flash-image",
+      model: smartModel
+        ? "gemini-3-pro-image-preview"
+        : "gemini-2.5-flash-image",
       contents: input,
     });
   } catch (e: unknown) {
@@ -131,8 +131,13 @@ const tools: FunctionDeclaration[] = [
           type: "string",
           description: "a very detailed prompt to generate an image",
         },
+        smart: {
+          type: "boolean",
+          description:
+            "use a smarter, more expensive model. only true if the user explicitly requests it, otherwise false",
+        },
       },
-      required: ["prompt"],
+      required: ["prompt", "smart"],
     },
   },
 ];
@@ -293,6 +298,16 @@ const toolFunctions: AIToolFunction[] = [
   {
     name: "generateImage",
     function: (matrix, args) => {
+      if (args.smart === true) {
+        matrix.client.sendTextMessage(
+          matrix.roomId,
+          `!imgpro ${args.prompt}`,
+        );
+        return {
+          message: "trying to generate using smart model...",
+        };
+      }
+
       matrix.client.sendTextMessage(matrix.roomId, `!img ${args.prompt}`);
 
       return {
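For reference, the model routing that the Gemini client hunks converge on, collected in one place. This is a sketch only: the helper names below are illustrative, while the smartModel flag and the model IDs are taken from the diff, which inlines these ternaries at each googleAI.models.generateContent call site.

// Illustrative helpers; not part of the commit itself.
const pickTextModel = (smartModel: boolean): string =>
  smartModel ? "gemini-3-pro-preview" : "gemini-3-flash-preview";

const pickImageModel = (smartModel: boolean): string =>
  smartModel ? "gemini-3-pro-image-preview" : "gemini-2.5-flash-image";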