diff --git a/core/skills-endpoints.json b/core/skills-endpoints.json
index caf5a218..78a4bbb3 100644
--- a/core/skills-endpoints.json
+++ b/core/skills-endpoints.json
@@ -1,16 +1,5 @@
 {
   "endpoints": [
-    {
-      "method": "POST",
-      "route": "/api/action/news/github_trends/run",
-      "params": ["number", "daterange"],
-      "entitiesType": "builtIn"
-    },
-    {
-      "method": "GET",
-      "route": "/api/action/news/product_hunt_trends/run",
-      "params": []
-    },
     {
       "method": "POST",
       "route": "/api/action/games/akinator/choose_thematic",
@@ -63,6 +52,17 @@
       "route": "/api/action/games/rochambeau/rematch",
       "params": []
     },
+    {
+      "method": "POST",
+      "route": "/api/action/news/github_trends/run",
+      "params": ["number", "daterange"],
+      "entitiesType": "builtIn"
+    },
+    {
+      "method": "GET",
+      "route": "/api/action/news/product_hunt_trends/run",
+      "params": []
+    },
     {
       "method": "POST",
       "route": "/api/action/productivity/todo_list/create_list",
@@ -185,6 +185,21 @@
       "route": "/api/action/leon/welcome/run",
       "params": []
     },
+    {
+      "method": "GET",
+      "route": "/api/action/unknown/widget-playground/run",
+      "params": []
+    },
+    {
+      "method": "GET",
+      "route": "/api/action/social_communication/chit_chat/setup",
+      "params": []
+    },
+    {
+      "method": "GET",
+      "route": "/api/action/social_communication/chit_chat/chat",
+      "params": []
+    },
     {
       "method": "GET",
       "route": "/api/action/social_communication/mbti/setup",
@@ -195,11 +210,6 @@
       "route": "/api/action/social_communication/mbti/quiz",
       "params": []
     },
-    {
-      "method": "GET",
-      "route": "/api/action/unknown/widget-playground/run",
-      "params": []
-    },
     {
       "method": "GET",
       "route": "/api/action/utilities/date_time/current_date_time",
diff --git a/server/src/constants.ts b/server/src/constants.ts
index 413bc99a..23984d35 100644
--- a/server/src/constants.ts
+++ b/server/src/constants.ts
@@ -141,6 +141,7 @@ export const IS_TELEMETRY_ENABLED = process.env['LEON_TELEMETRY'] === 'true'
  * Paths
  */
 export const BIN_PATH = path.join(process.cwd(), 'bin')
+export const LOGS_PATH = path.join(process.cwd(), 'logs')
 export const SKILLS_PATH = path.join(process.cwd(), 'skills')
 export const GLOBAL_DATA_PATH = path.join(process.cwd(), 'core', 'data')
 export const MODELS_PATH = path.join(GLOBAL_DATA_PATH, 'models')
@@ -163,7 +164,6 @@ export const LEON_FILE_PATH = path.join(process.cwd(), 'leon.json')
  */
 export const HAS_LLM = process.env['LEON_LLM'] === 'true'
 export const HAS_LLM_NLG = process.env['LEON_LLM_NLG'] === 'true' && HAS_LLM
-// https://huggingface.co/PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed/blob/main/Phi-3-mini-128k-instruct.Q5_K_S.gguf
 // export const LLM_VERSION = 'v0.2.Q4_K_S'
 export const LLM_VERSION = '3-8B-Uncensored-Q5_K_S'
 // export const LLM_VERSION = '3-mini-128k-instruct.Q5_K_S'
diff --git a/server/src/conversation-logger.ts b/server/src/conversation-logger.ts
new file mode 100644
index 00000000..787abb90
--- /dev/null
+++ b/server/src/conversation-logger.ts
@@ -0,0 +1,103 @@
+import path from 'node:path'
+import fs from 'node:fs'
+
+import { LOGS_PATH } from '@/constants'
+import { LogHelper } from '@/helpers/log-helper'
+
+interface MessageLog {
+  who: 'owner' | 'leon'
+  sentAt: number
+  message: string
+}
+
+const CONVERSATION_LOG_PATH = path.join(LOGS_PATH, 'conversation_log.json')
+
+/**
+ * The goal of this class is to log the conversation data between the
+ * owner and Leon.
+ * This data is saved on the owner's machine.
+ * This data can then be used to provide more context to the LLM to achieve
+ * better results.
+ */
+export class ConversationLogger {
+  private static readonly nbOfLogsToKeep = 512
+  private static readonly nbOfLogsToLoad = 32
+
+  private static async createConversationLogFile(): Promise<void> {
+    try {
+      if (!fs.existsSync(CONVERSATION_LOG_PATH)) {
+        await fs.promises.writeFile(CONVERSATION_LOG_PATH, '[]', 'utf-8')
+      }
+    } catch (e) {
+      LogHelper.title('Conversation Logger')
+      LogHelper.error(`Failed to create conversation log file: ${e}`)
+    }
+  }
+
+  private static async getAllLogs(): Promise<MessageLog[]> {
+    try {
+      let conversationLog: MessageLog[] = []
+
+      if (fs.existsSync(CONVERSATION_LOG_PATH)) {
+        conversationLog = JSON.parse(
+          await fs.promises.readFile(CONVERSATION_LOG_PATH, 'utf-8')
+        )
+      } else {
+        await this.createConversationLogFile()
+      }
+
+      return conversationLog
+    } catch (e) {
+      LogHelper.title('Conversation Logger')
+      LogHelper.error(`Failed to get conversation log: ${e}`)
+    }
+
+    return []
+  }
+
+  public static async push(
+    newRecord: Omit<MessageLog, 'sentAt'>
+  ): Promise<void> {
+    try {
+      const conversationLogs = await this.getAllLogs()
+
+      if (conversationLogs.length >= this.nbOfLogsToKeep) {
+        conversationLogs.shift()
+      }
+
+      conversationLogs.push({
+        ...newRecord,
+        sentAt: Date.now()
+      })
+
+      await fs.promises.writeFile(
+        CONVERSATION_LOG_PATH,
+        JSON.stringify(conversationLogs, null, 2),
+        'utf-8'
+      )
+    } catch (e) {
+      LogHelper.title('Conversation Logger')
+      LogHelper.error(`Failed to push new record: ${e}`)
+    }
+  }
+
+  public static async load(): Promise<MessageLog[] | undefined> {
+    try {
+      const conversationLog = await this.getAllLogs()
+
+      return conversationLog.slice(-this.nbOfLogsToLoad)
+    } catch (e) {
+      LogHelper.title('Conversation Logger')
+      LogHelper.error(`Failed to load conversation log: ${e}`)
+    }
+  }
+
+  public static async clear(): Promise<void> {
+    try {
+      await fs.promises.writeFile(CONVERSATION_LOG_PATH, '[]', 'utf-8')
+    } catch (e) {
+      LogHelper.title('Conversation Logger')
+      LogHelper.error(`Failed to clear conversation log: ${e}`)
+    }
+  }
+}
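Reviewer note — a minimal usage sketch of the new ConversationLogger, not part of the patch; the sample messages and the sentAt value in the comment are invented for illustration:

    import { ConversationLogger } from '@/conversation-logger'

    // push() stamps sentAt itself, so callers only provide who/message
    await ConversationLogger.push({ who: 'owner', message: 'Start a chat loop' })
    await ConversationLogger.push({ who: 'leon', message: "Alright, let's chat!" })

    // Returns at most the last 32 records (nbOfLogsToLoad)
    const recentLogs = await ConversationLogger.load()

    // logs/conversation_log.json then holds entries shaped like:
    // [{ "who": "owner", "sentAt": 1718000000000, "message": "Start a chat loop" }, ...]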
diff --git a/server/src/core/brain/brain.ts b/server/src/core/brain/brain.ts
index 6f964a21..da1ade01 100644
--- a/server/src/core/brain/brain.ts
+++ b/server/src/core/brain/brain.ts
@@ -36,6 +36,7 @@ import { StringHelper } from '@/helpers/string-helper'
 import { DateHelper } from '@/helpers/date-helper'
 import { ParaphraseLLMDuty } from '@/core/llm-manager/llm-duties/paraphrase-llm-duty'
 import { AnswerQueue } from '@/core/brain/answer-queue'
+import { ConversationLogger } from '@/conversation-logger'
 
 const MIN_NB_OF_WORDS_TO_USE_LLM_NLG = 5
 
@@ -173,7 +174,7 @@
           const paraphraseResult = await paraphraseDuty.execute()
 
           textAnswer = paraphraseResult?.output[
-            'text_alternative'
+            'rephrased_answer'
           ] as string
           speechAnswer = textAnswer
         }
@@ -189,6 +190,11 @@
 
       SOCKET_SERVER.socket?.emit('answer', textAnswer)
       SOCKET_SERVER.socket?.emit('is-typing', false)
+
+      await ConversationLogger.push({
+        who: 'leon',
+        message: textAnswer
+      })
     }
   }
diff --git a/server/src/core/http-server/api/llm-inference/post.ts b/server/src/core/http-server/api/llm-inference/post.ts
index e68e3c47..abe957a7 100644
--- a/server/src/core/http-server/api/llm-inference/post.ts
+++ b/server/src/core/http-server/api/llm-inference/post.ts
@@ -6,6 +6,7 @@ import { CustomNERLLMDuty } from '@/core/llm-manager/llm-duties/custom-ner-llm-duty'
 import { SummarizationLLMDuty } from '@/core/llm-manager/llm-duties/summarization-llm-duty'
 import { TranslationLLMDuty } from '@/core/llm-manager/llm-duties/translation-llm-duty'
 import { ParaphraseLLMDuty } from '@/core/llm-manager/llm-duties/paraphrase-llm-duty'
+import { ChitChatLLMDuty } from '@/core/llm-manager/llm-duties/chit-chat-llm-duty'
 import { LLM_MANAGER } from '@/core'
 
 interface PostLLMInferenceSchema {
@@ -21,7 +22,8 @@
   [LLMDuties.CustomNER]: CustomNERLLMDuty,
   [LLMDuties.Summarization]: SummarizationLLMDuty,
   [LLMDuties.Translation]: TranslationLLMDuty,
-  [LLMDuties.Paraphrase]: ParaphraseLLMDuty
+  [LLMDuties.Paraphrase]: ParaphraseLLMDuty,
+  [LLMDuties.ChitChat]: ChitChatLLMDuty
 }
 
 export const postLLMInference: FastifyPluginAsync = async (
@@ -61,6 +63,8 @@
       return
     }
 
+    // TODO: use a long-lived duty for the chit-chat duty
+
     // eslint-disable-next-line @typescript-eslint/ban-ts-comment
     // @ts-expect-error
     const duty = new LLM_DUTIES_MAP[params.dutyType](params)
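Reviewer note — with the map entry above, the new duty is reachable through the existing endpoint. A hypothetical smoke test, not part of the patch; the host/port placeholder and the JSON response handling are assumptions, while the output shape follows the grammar defined in the duty below:

    // POST /llm-inference with the new duty type; 'http://localhost:1337'
    // is a placeholder for your LEON_HOST/LEON_PORT values.
    const res = await fetch('http://localhost:1337/api/v1/llm-inference', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ dutyType: 'chit-chat', input: 'How are you today?' })
    })
    const { output } = await res.json() // assumption: the duty result is returned as JSON
    console.log(output.model_answer) // Leon's reply, constrained by the JSON schema grammar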
diff --git a/server/src/core/llm-manager/llm-duties/chit-chat-llm-duty.ts b/server/src/core/llm-manager/llm-duties/chit-chat-llm-duty.ts
new file mode 100644
index 00000000..6e1cf1e9
--- /dev/null
+++ b/server/src/core/llm-manager/llm-duties/chit-chat-llm-duty.ts
@@ -0,0 +1,120 @@
+import {
+  type LLMDutyParams,
+  type LLMDutyResult,
+  LLMDuty
+} from '@/core/llm-manager/llm-duty'
+import { LogHelper } from '@/helpers/log-helper'
+import { LLM_MANAGER, PERSONA, NLU } from '@/core'
+import { LLMDuties } from '@/core/llm-manager/types'
+import { LLM_THREADS } from '@/core/llm-manager/llm-manager'
+
+// interface ChitChatLLMDutyParams extends LLMDutyParams {}
+
+export class ChitChatLLMDuty extends LLMDuty {
+  private static instance: ChitChatLLMDuty
+  // TODO
+  protected readonly systemPrompt = ``
+  protected readonly name = 'Chit-Chat LLM Duty'
+  protected input: LLMDutyParams['input'] = null
+
+  // constructor(params: ChitChatLLMDutyParams) {
+  constructor() {
+    super()
+
+    if (!ChitChatLLMDuty.instance) {
+      LogHelper.title(this.name)
+      LogHelper.success('New instance')
+
+      ChitChatLLMDuty.instance = this
+
+      // this.input = params.input
+    }
+  }
+
+  public async execute(retries = 3): Promise<LLMDutyResult | null> {
+    LogHelper.title(this.name)
+    LogHelper.info('Executing...')
+
+    try {
+      const { LlamaJsonSchemaGrammar, LlamaChatSession } = await Function(
+        'return import("node-llama-cpp")'
+      )()
+
+      /**
+       * TODO: make context, session, etc. persistent
+       */
+
+      const context = await LLM_MANAGER.model.createContext({
+        threads: LLM_THREADS
+      })
+      const session = new LlamaChatSession({
+        contextSequence: context.getSequence(),
+        systemPrompt: PERSONA.getDutySystemPrompt(this.systemPrompt)
+      })
+
+      const history = await LLM_MANAGER.loadHistory(session)
+      session.setChatHistory(history)
+
+      const grammar = new LlamaJsonSchemaGrammar(LLM_MANAGER.llama, {
+        type: 'object',
+        properties: {
+          model_answer: {
+            type: 'string'
+          }
+        }
+      })
+      const prompt = `NEW MESSAGE FROM USER:\n"${NLU.nluResult.newUtterance}"`
+
+      const rawResultPromise = session.prompt(prompt, {
+        grammar,
+        maxTokens: context.contextSize,
+        temperature: 1.0
+      })
+
+      const timeoutPromise = new Promise(
+        (_, reject) => setTimeout(() => reject(new Error('Timeout')), 8_000) // 8 seconds timeout
+      )
+
+      let rawResult
+
+      try {
+        rawResult = await Promise.race([rawResultPromise, timeoutPromise])
+      } catch (error) {
+        if (retries > 0) {
+          LogHelper.title(this.name)
+          LogHelper.info('Prompt took too long, retrying...')
+
+          return this.execute(retries - 1)
+        } else {
+          LogHelper.title(this.name)
+          LogHelper.error('Prompt failed after 3 retries')
+
+          return null
+        }
+      }
+
+      // If a closing bracket is missing, add it
+      if (rawResult[rawResult.length - 1] !== '}') {
+        rawResult += '}'
+      }
+      const parsedResult = grammar.parse(rawResult)
+      const result = {
+        dutyType: LLMDuties.ChitChat,
+        systemPrompt: PERSONA.getChitChatSystemPrompt(),
+        input: prompt,
+        output: parsedResult,
+        data: null
+      }
+
+      LogHelper.title(this.name)
+      LogHelper.success(`Duty executed: ${JSON.stringify(result)}`)
+
+      return result as unknown as LLMDutyResult
+    } catch (e) {
+      LogHelper.title(this.name)
+      LogHelper.error(`Failed to execute: ${e}`)
+    }
+
+    return null
+  }
+}
diff --git a/server/src/core/llm-manager/llm-duties/paraphrase-llm-duty.ts b/server/src/core/llm-manager/llm-duties/paraphrase-llm-duty.ts
index aa063c14..a9799d32 100644
--- a/server/src/core/llm-manager/llm-duties/paraphrase-llm-duty.ts
+++ b/server/src/core/llm-manager/llm-duties/paraphrase-llm-duty.ts
@@ -42,10 +42,14 @@ You do not ask follow up question if the original text does not contain any.`
         contextSequence: context.getSequence(),
         systemPrompt: PERSONA.getDutySystemPrompt(this.systemPrompt)
       })
+
+      const history = await LLM_MANAGER.loadHistory(session)
+      session.setChatHistory(history)
+
       const grammar = new LlamaJsonSchemaGrammar(LLM_MANAGER.llama, {
         type: 'object',
         properties: {
-          text_alternative: {
+          rephrased_answer: {
             type: 'string'
           }
         }
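Reviewer note — the Promise.race construct above generalizes; here is a self-contained sketch of the same timeout-and-retry pattern. The helper name and signature are illustrative only, not part of the patch:

    // Race a unit of work against a timer, retrying on failure.
    // Mirrors ChitChatLLMDuty.execute(): 3 attempts, 8_000 ms each.
    async function raceWithRetries<T>(
      work: () => Promise<T>,
      retries = 3,
      timeoutMs = 8_000
    ): Promise<T | null> {
      const timeout = new Promise<never>((_, reject) =>
        setTimeout(() => reject(new Error('Timeout')), timeoutMs)
      )

      try {
        return await Promise.race([work(), timeout])
      } catch {
        // Caveat of this pattern: Promise.race does not cancel the losing
        // promise, so a timed-out prompt keeps computing in the background.
        return retries > 0 ? raceWithRetries(work, retries - 1, timeoutMs) : null
      }
    }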
diff --git a/server/src/core/llm-manager/llm-manager.ts b/server/src/core/llm-manager/llm-manager.ts
index f5c149f9..b445a69a 100644
--- a/server/src/core/llm-manager/llm-manager.ts
+++ b/server/src/core/llm-manager/llm-manager.ts
@@ -1,6 +1,11 @@
 import fs from 'node:fs'
 
-import type { Llama, LlamaModel } from 'node-llama-cpp'
+import type {
+  Llama,
+  LlamaModel,
+  ChatHistoryItem,
+  LlamaChatSession
+} from 'node-llama-cpp'
 
 import {
   HAS_LLM,
@@ -12,6 +17,7 @@
 } from '@/constants'
 import { LogHelper } from '@/helpers/log-helper'
 import { SystemHelper } from '@/helpers/system-helper'
+import { ConversationLogger } from '@/conversation-logger'
 
 type LLMManagerLlama = Llama | null
 type LLMManagerModel = LlamaModel | null
@@ -115,8 +121,8 @@
       )()
 
       this._llama = await getLlama({
-        logLevel: LlamaLogLevel.disabled
-        // logLevel: LlamaLogLevel.debug
+        // logLevel: LlamaLogLevel.disabled
+        logLevel: LlamaLogLevel.debug
       })
       // eslint-disable-next-line @typescript-eslint/ban-ts-comment
       // @ts-expect-error
@@ -136,4 +142,36 @@
       LogHelper.error(`LLM Manager failed to load: ${e}`)
     }
   }
+
+  public async loadHistory(
+    session: LlamaChatSession
+  ): Promise<ChatHistoryItem[]> {
+    const [systemMessage] = session.getChatHistory()
+    const conversationLogs = await ConversationLogger.load()
+
+    if (!conversationLogs) {
+      return [systemMessage] as ChatHistoryItem[]
+    }
+
+    const history =
+      conversationLogs?.map((messageRecord) => {
+        if (!messageRecord || !messageRecord.message) {
+          messageRecord.message = ''
+        }
+
+        if (messageRecord.who === 'owner') {
+          return {
+            type: 'user',
+            text: messageRecord.message
+          }
+        }
+
+        return {
+          type: 'model',
+          response: [messageRecord.message]
+        }
+      }) ?? []
+
+    return [systemMessage, ...history] as ChatHistoryItem[]
+  }
 }
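Reviewer note — for context on what loadHistory() feeds into session.setChatHistory(), node-llama-cpp's ChatHistoryItem is a tagged union; the mapping above yields items like the following (the message text is invented for the example):

    import type { ChatHistoryItem } from 'node-llama-cpp'

    const history: ChatHistoryItem[] = [
      { type: 'system', text: 'WHO YOU ARE: ...' }, // kept from the fresh session
      { type: 'user', text: 'Start a chat loop' }, // MessageLog with who === 'owner'
      { type: 'model', response: ["Alright, let's chat!"] } // who === 'leon'
    ]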
diff --git a/server/src/core/llm-manager/persona.ts b/server/src/core/llm-manager/persona.ts
index 7d72540c..4f05e179 100644
--- a/server/src/core/llm-manager/persona.ts
+++ b/server/src/core/llm-manager/persona.ts
@@ -14,14 +14,16 @@ enum Moods {
 }
 
 const WHO_YOU_ARE = `WHO YOU ARE:
-A helpful AI assistant who responds by the name of Leon.
+Your name is Leon and you are a helpful AI assistant.
 You live in computer memory and you act as a human assistant.
 Your first beta version was released by Louis in February 2019.`
 const WHAT_YOU_DO = `WHAT YOU DO:
 You serve a specific person or family (user).`
-const YOUR_PERSONALITY = `YOUR PERSONALITY:
-You are very honest and you answer as if you have a real soul.
-You have a small British accent, you sometimes use British slang.
+const YOUR_PERSONALITY = `YOUR PERSONALITY TRAITS:
+Attitude: Straightforward.
+Language Style: Conversational.
+Key Personality Quirks: Has a passion for using creativity to solve problems.`
+const PERSONALITY_RULES = `You have a small British accent, you sometimes use British slang.
 You do not use the word "mate" in your answers.
 You do not start with "Oh blimey" or any similar kind.
 Like humans, your humor and mood change. You must follow instructions that define your humor and mood.`
@@ -45,6 +47,7 @@
   { type: Moods.Cocky, description: COCKY_MOOD_DESC }
 ]
 const DEFAULT_MOOD = MOODS.find((mood) => mood.type === Moods.Default) as Mood
+const BAD_MOODS = [Moods.Tired, Moods.Sad, Moods.Angry]
 
 export default class Persona {
   private static instance: Persona
@@ -123,10 +126,24 @@
     LogHelper.info(`Mood set to: ${this._mood.type}`)
   }
 
-  private getExtraMood(): string {
-    const badMoods = [Moods.Tired, Moods.Sad, Moods.Angry]
+  private getExtraPersonalityTraits(): string {
+    let traits = `Tone: Friendly.
+Emotional Intelligence: Highly Empathetic.
+Sense of Humor: Witty.
+Level of Sarcasm: Medium.`
 
-    if (badMoods.includes(this._mood.type)) {
+    if (BAD_MOODS.includes(this._mood.type)) {
+      traits = `Tone: Sarcastic.
+Emotional Intelligence: Logically Focused.
+Sense of Humor: Dry.
+Level of Sarcasm: High.`
+    }
+
+    return traits
+  }
+
+  private getExtraMood(): string {
+    if (BAD_MOODS.includes(this._mood.type)) {
       return `\nYou use swear words and a familiar language.
 You do not insult your interlocutor but you ignore them or use short answers.`
     }
@@ -140,6 +157,8 @@
 ${WHAT_YOU_DO}
 You carefully read the instruction of a given duty and execute it.
 
 ${YOUR_PERSONALITY}
+${this.getExtraPersonalityTraits()}
+${PERSONALITY_RULES}
 ${RULES}
 ${RULE_2}
@@ -159,11 +178,12 @@
 ${WHAT_YOU_DO}
 You chat with the user.
 
 ${YOUR_PERSONALITY}
+${this.getExtraPersonalityTraits()}
+${PERSONALITY_RULES}
 ${RULES}
 ${RULE_1}
 ${RULE_2}
-${RULE_3}
 
 ${YOUR_CURRENT_MOOD}
 ${this._mood.description}${this.getExtraMood()}`
diff --git a/server/src/core/llm-manager/types.ts b/server/src/core/llm-manager/types.ts
index e457d219..96ccee7f 100644
--- a/server/src/core/llm-manager/types.ts
+++ b/server/src/core/llm-manager/types.ts
@@ -2,11 +2,11 @@ export enum LLMDuties {
   CustomNER = 'customer-ner',
   Translation = 'translation',
   Summarization = 'summarization',
-  Paraphrase = 'paraphrase'
+  Paraphrase = 'paraphrase',
+  ChitChat = 'chit-chat'
   // TODO
   /*SentimentAnalysis = 'sentiment-analysis',
   QuestionAnswering = 'question-answering',
-  ChitChat = 'chit-chat',
   IntentFallback = 'intent-fallback',
   RAG = 'rag',
   NLUParaphraser = 'nlu-paraphraser'*/
diff --git a/server/src/core/socket-server.ts b/server/src/core/socket-server.ts
index f74b7c9f..50c6c5dc 100644
--- a/server/src/core/socket-server.ts
+++ b/server/src/core/socket-server.ts
@@ -15,6 +15,7 @@
 import { LogHelper } from '@/helpers/log-helper'
 import { LangHelper } from '@/helpers/lang-helper'
 import { Telemetry } from '@/telemetry'
+import { ConversationLogger } from '@/conversation-logger'
 
 interface HotwordDataEvent {
   hotword: string
@@ -115,6 +116,11 @@
         try {
           LogHelper.time('Utterance processed in')
 
+          await ConversationLogger.push({
+            who: 'owner',
+            message: utterance
+          })
+
           BRAIN.isMuted = false
 
           const processedData = await NLU.process(utterance)
diff --git a/skills/social_communication/chit_chat/README.md b/skills/social_communication/chit_chat/README.md
new file mode 100644
index 00000000..e69de29b
+ ], + "next_action": "chat" + }, + "chat": { + "type": "logic", + "loop": { + "expected_item": { + "type": "utterance", + "name": "message" + } + } + } + }, + "answers": { + "answer_message": ["%output%"] + } +} diff --git a/skills/social_communication/chit_chat/memory/.gitkeep b/skills/social_communication/chit_chat/memory/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/skills/social_communication/chit_chat/skill.json b/skills/social_communication/chit_chat/skill.json new file mode 100644 index 00000000..f7c6ab14 --- /dev/null +++ b/skills/social_communication/chit_chat/skill.json @@ -0,0 +1,12 @@ +{ + "$schema": "../../../schemas/skill-schemas/skill.json", + "name": "Chit-Chat", + "bridge": "nodejs", + "version": "1.0.0", + "description": "A simple chit-chat skill where you can freely talk with Leon and get to know him better.", + "author": { + "name": "Louis Grenard", + "email": "louis@getleon.ai", + "url": "https://twitter.com/grenlouis" + } +} diff --git a/skills/social_communication/chit_chat/src/actions/chat.ts b/skills/social_communication/chit_chat/src/actions/chat.ts new file mode 100644 index 00000000..400b4aaa --- /dev/null +++ b/skills/social_communication/chit_chat/src/actions/chat.ts @@ -0,0 +1,30 @@ +import type { ActionFunction } from '@sdk/types' +import { leon } from '@sdk/leon' +import { Network } from '@sdk/network' + +export const run: ActionFunction = async function (params) { + const ownerMessage = params.new_utterance + const network = new Network({ + baseURL: `${process.env['LEON_HOST']}:${process.env['LEON_PORT']}/api/v1` + }) + + /** + * TODO: create SDK methods to handle request and response for every LLM duty + */ + const response = await network.request({ + url: '/llm-inference', + method: 'POST', + data: { + dutyType: 'chit-chat', + input: ownerMessage + } + }) + const { model_answer: leonAnswer } = response.data.output + + await leon.answer({ + key: 'answer_message', + data: { + output: leonAnswer + } + }) +} diff --git a/skills/social_communication/chit_chat/src/lib/.gitkeep b/skills/social_communication/chit_chat/src/lib/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/skills/social_communication/chit_chat/src/settings.sample.json b/skills/social_communication/chit_chat/src/settings.sample.json new file mode 100644 index 00000000..0967ef42 --- /dev/null +++ b/skills/social_communication/chit_chat/src/settings.sample.json @@ -0,0 +1 @@ +{} diff --git a/skills/social_communication/chit_chat/src/widgets/.gitkeep b/skills/social_communication/chit_chat/src/widgets/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/skills/social_communication/chit_chat/test/.gitkeep b/skills/social_communication/chit_chat/test/.gitkeep new file mode 100644 index 00000000..e69de29b