1
1
mirror of https://github.com/leon-ai/leon.git synced 2024-11-23 20:12:08 +03:00

feat(server): chit-chat duty and skill and more

This commit is contained in:
louistiti 2024-05-05 00:20:59 +08:00
parent 78e5f79bce
commit eeb1b07898
No known key found for this signature in database
GPG Key ID: 92CD6A2E497E1669
20 changed files with 415 additions and 33 deletions

View File

@ -1,16 +1,5 @@
{
"endpoints": [
{
"method": "POST",
"route": "/api/action/news/github_trends/run",
"params": ["number", "daterange"],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/news/product_hunt_trends/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/games/akinator/choose_thematic",
@ -63,6 +52,17 @@
"route": "/api/action/games/rochambeau/rematch",
"params": []
},
{
"method": "POST",
"route": "/api/action/news/github_trends/run",
"params": ["number", "daterange"],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/news/product_hunt_trends/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/create_list",
@ -185,6 +185,21 @@
"route": "/api/action/leon/welcome/run",
"params": []
},
{
"method": "GET",
"route": "/api/action/unknown/widget-playground/run",
"params": []
},
{
"method": "GET",
"route": "/api/action/social_communication/chit_chat/setup",
"params": []
},
{
"method": "GET",
"route": "/api/action/social_communication/chit_chat/chat",
"params": []
},
{
"method": "GET",
"route": "/api/action/social_communication/mbti/setup",
@ -195,11 +210,6 @@
"route": "/api/action/social_communication/mbti/quiz",
"params": []
},
{
"method": "GET",
"route": "/api/action/unknown/widget-playground/run",
"params": []
},
{
"method": "GET",
"route": "/api/action/utilities/date_time/current_date_time",

View File

@ -141,6 +141,7 @@ export const IS_TELEMETRY_ENABLED = process.env['LEON_TELEMETRY'] === 'true'
* Paths
*/
export const BIN_PATH = path.join(process.cwd(), 'bin')
export const LOGS_PATH = path.join(process.cwd(), 'logs')
export const SKILLS_PATH = path.join(process.cwd(), 'skills')
export const GLOBAL_DATA_PATH = path.join(process.cwd(), 'core', 'data')
export const MODELS_PATH = path.join(GLOBAL_DATA_PATH, 'models')
@ -163,7 +164,6 @@ export const LEON_FILE_PATH = path.join(process.cwd(), 'leon.json')
*/
export const HAS_LLM = process.env['LEON_LLM'] === 'true'
export const HAS_LLM_NLG = process.env['LEON_LLM_NLG'] === 'true' && HAS_LLM
// https://huggingface.co/PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed/blob/main/Phi-3-mini-128k-instruct.Q5_K_S.gguf
// export const LLM_VERSION = 'v0.2.Q4_K_S'
export const LLM_VERSION = '3-8B-Uncensored-Q5_K_S'
// export const LLM_VERSION = '3-mini-128k-instruct.Q5_K_S'

View File

@ -0,0 +1,103 @@
import path from 'node:path'
import fs from 'node:fs'
import { LOGS_PATH } from '@/constants'
import { LogHelper } from '@/helpers/log-helper'
interface MessageLog {
  who: 'owner' | 'leon'
  sentAt: number
  message: string
}

const CONVERSATION_LOG_PATH = path.join(LOGS_PATH, 'conversation_log.json')

/**
 * The goal of this class is to log the conversation data between the
 * owner and Leon.
 * This data is saved on the owner's machine.
 * This data can then be used to provide more context to the LLM to achieve
 * better results.
 */
export class ConversationLogger {
  // Upper bound of records kept on disk; the oldest record is evicted first
  private static readonly nbOfLogsToKeep = 512
  // Number of most recent records returned by load()
  private static readonly nbOfLogsToLoad = 32

  /**
   * Create the conversation log file as an empty JSON array
   * when it does not exist yet.
   */
  private static async createConversationLogFile(): Promise<void> {
    try {
      if (!fs.existsSync(CONVERSATION_LOG_PATH)) {
        await fs.promises.writeFile(CONVERSATION_LOG_PATH, '[]', 'utf-8')
      }
    } catch (e) {
      LogHelper.title('Conversation Logger')
      LogHelper.error(`Failed to create conversation log file: ${e}`)
    }
  }

  /**
   * Read every record from the conversation log file.
   * Returns an empty array when the file is missing or unreadable.
   */
  private static async getAllLogs(): Promise<MessageLog[]> {
    try {
      let conversationLog: MessageLog[] = []

      if (fs.existsSync(CONVERSATION_LOG_PATH)) {
        conversationLog = JSON.parse(
          await fs.promises.readFile(CONVERSATION_LOG_PATH, 'utf-8')
        )
      } else {
        await this.createConversationLogFile()
      }

      return conversationLog
    } catch (e) {
      LogHelper.title('Conversation Logger')
      LogHelper.error(`Failed to get conversation log: ${e}`)
    }

    return []
  }

  /**
   * Append a new record, timestamped with the current time, to the log.
   * Evicts the oldest record once the cap (nbOfLogsToKeep) is reached.
   */
  public static async push(
    newRecord: Omit<MessageLog, 'sentAt'>
  ): Promise<void> {
    try {
      const conversationLogs = await this.getAllLogs()

      if (conversationLogs.length >= this.nbOfLogsToKeep) {
        conversationLogs.shift()
      }

      conversationLogs.push({
        ...newRecord,
        sentAt: Date.now()
      })

      await fs.promises.writeFile(
        CONVERSATION_LOG_PATH,
        JSON.stringify(conversationLogs, null, 2),
        'utf-8'
      )
    } catch (e) {
      LogHelper.title('Conversation Logger')
      LogHelper.error(`Failed to push new record: ${e}`)
    }
  }

  /**
   * Load the most recent records (up to nbOfLogsToLoad).
   */
  public static async load(): Promise<MessageLog[] | void> {
    try {
      const conversationLog = await this.getAllLogs()

      return conversationLog.slice(-this.nbOfLogsToLoad)
    } catch (e) {
      LogHelper.title('Conversation Logger')
      LogHelper.error(`Failed to load conversation log: ${e}`)
    }
  }

  /**
   * Reset the conversation log to an empty JSON array.
   */
  public static async clear(): Promise<void> {
    try {
      await fs.promises.writeFile(CONVERSATION_LOG_PATH, '[]', 'utf-8')
    } catch (e) {
      LogHelper.title('Conversation Logger')
      LogHelper.error(`Failed to clear conversation log: ${e}`)
    }
  }
}

View File

@ -36,6 +36,7 @@ import { StringHelper } from '@/helpers/string-helper'
import { DateHelper } from '@/helpers/date-helper'
import { ParaphraseLLMDuty } from '@/core/llm-manager/llm-duties/paraphrase-llm-duty'
import { AnswerQueue } from '@/core/brain/answer-queue'
import { ConversationLogger } from '@/conversation-logger'
const MIN_NB_OF_WORDS_TO_USE_LLM_NLG = 5
@ -173,7 +174,7 @@ export default class Brain {
const paraphraseResult = await paraphraseDuty.execute()
textAnswer = paraphraseResult?.output[
'text_alternative'
'rephrased_answer'
] as string
speechAnswer = textAnswer
}
@ -189,6 +190,11 @@ export default class Brain {
SOCKET_SERVER.socket?.emit('answer', textAnswer)
SOCKET_SERVER.socket?.emit('is-typing', false)
await ConversationLogger.push({
who: 'leon',
message: textAnswer
})
}
}

View File

@ -6,6 +6,7 @@ import { CustomNERLLMDuty } from '@/core/llm-manager/llm-duties/custom-ner-llm-d
import { SummarizationLLMDuty } from '@/core/llm-manager/llm-duties/summarization-llm-duty'
import { TranslationLLMDuty } from '@/core/llm-manager/llm-duties/translation-llm-duty'
import { ParaphraseLLMDuty } from '@/core/llm-manager/llm-duties/paraphrase-llm-duty'
import { ChitChatLLMDuty } from '@/core/llm-manager/llm-duties/chit-chat-llm-duty'
import { LLM_MANAGER } from '@/core'
interface PostLLMInferenceSchema {
@ -21,7 +22,8 @@ const LLM_DUTIES_MAP = {
[LLMDuties.CustomNER]: CustomNERLLMDuty,
[LLMDuties.Summarization]: SummarizationLLMDuty,
[LLMDuties.Translation]: TranslationLLMDuty,
[LLMDuties.Paraphrase]: ParaphraseLLMDuty
[LLMDuties.Paraphrase]: ParaphraseLLMDuty,
[LLMDuties.ChitChat]: ChitChatLLMDuty
}
export const postLLMInference: FastifyPluginAsync<APIOptions> = async (
@ -61,6 +63,8 @@ export const postLLMInference: FastifyPluginAsync<APIOptions> = async (
return
}
// TODO: use long-live duty for chit-chat duty
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-expect-error
const duty = new LLM_DUTIES_MAP[params.dutyType](params)

View File

@ -0,0 +1,120 @@
import {
type LLMDutyParams,
type LLMDutyResult,
LLMDuty
} from '@/core/llm-manager/llm-duty'
import { LogHelper } from '@/helpers/log-helper'
import { LLM_MANAGER, PERSONA, NLU } from '@/core'
import { LLMDuties } from '@/core/llm-manager/types'
import { LLM_THREADS } from '@/core/llm-manager/llm-manager'
// interface ChitChatLLMDutyParams extends LLMDutyParams {}
export class ChitChatLLMDuty extends LLMDuty {
  private static instance: ChitChatLLMDuty
  // TODO
  protected readonly systemPrompt = ``
  protected readonly name = 'Chit-Chat LLM Duty'
  protected input: LLMDutyParams['input'] = null

  // constructor(params: ChitChatLLMDutyParams) {
  constructor() {
    super()

    // NOTE(review): this only records the first instance; every `new` call
    // still returns a fresh object, so this is not a strict singleton —
    // confirm whether that is intended
    if (!ChitChatLLMDuty.instance) {
      LogHelper.title(this.name)
      LogHelper.success('New instance')

      ChitChatLLMDuty.instance = this
      // this.input = params.input
    }
  }

  /**
   * Prompt the LLM with the latest owner utterance, constrained by a JSON
   * grammar ({ model_answer: string }), and return the structured result.
   * Retries up to `retries` times when the prompt exceeds the timeout.
   */
  public async execute(retries = 3): Promise<LLMDutyResult | null> {
    LogHelper.title(this.name)
    LogHelper.info('Executing...')

    try {
      const { LlamaJsonSchemaGrammar, LlamaChatSession } = await Function(
        'return import("node-llama-cpp")'
      )()

      /**
       * TODO: make context, session, etc. persistent
       */
      const context = await LLM_MANAGER.model.createContext({
        threads: LLM_THREADS
      })
      // NOTE(review): the session is created with getDutySystemPrompt(''),
      // while the returned result reports getChitChatSystemPrompt() —
      // confirm which system prompt chit-chat should actually use
      const session = new LlamaChatSession({
        contextSequence: context.getSequence(),
        systemPrompt: PERSONA.getDutySystemPrompt(this.systemPrompt)
      })
      const history = await LLM_MANAGER.loadHistory(session)
      session.setChatHistory(history)

      const grammar = new LlamaJsonSchemaGrammar(LLM_MANAGER.llama, {
        type: 'object',
        properties: {
          model_answer: {
            type: 'string'
          }
        }
      })
      const prompt = `NEW MESSAGE FROM USER:\n"${NLU.nluResult.newUtterance}"`

      const rawResultPromise = session.prompt(prompt, {
        grammar,
        maxTokens: context.contextSize,
        temperature: 1.0
      })
      const timeoutPromise = new Promise(
        (_, reject) => setTimeout(() => reject(new Error('Timeout')), 8_000) // 8 seconds timeout
      )

      let rawResult

      try {
        rawResult = await Promise.race([rawResultPromise, timeoutPromise])
      } catch (error) {
        if (retries > 0) {
          LogHelper.title(this.name)
          LogHelper.info('Prompt took too long, retrying...')

          return this.execute(retries - 1)
        } else {
          LogHelper.title(this.name)
          LogHelper.error('Prompt failed after all retries')

          return null
        }
      }

      // If a closing bracket is missing, add it
      if (rawResult[rawResult.length - 1] !== '}') {
        rawResult += '}'
      }

      const parsedResult = grammar.parse(rawResult)
      const result = {
        // Fixed: previously reported LLMDuties.Paraphrase for the chit-chat duty
        dutyType: LLMDuties.ChitChat,
        systemPrompt: PERSONA.getChitChatSystemPrompt(),
        input: prompt,
        output: parsedResult,
        data: null
      }

      LogHelper.title(this.name)
      LogHelper.success(`Duty executed: ${JSON.stringify(result)}`)

      return result as unknown as LLMDutyResult
    } catch (e) {
      LogHelper.title(this.name)
      LogHelper.error(`Failed to execute: ${e}`)
    }

    return null
  }
}

View File

@ -42,10 +42,14 @@ You do not ask follow up question if the original text does not contain any.`
contextSequence: context.getSequence(),
systemPrompt: PERSONA.getDutySystemPrompt(this.systemPrompt)
})
const history = await LLM_MANAGER.loadHistory(session)
session.setChatHistory(history)
const grammar = new LlamaJsonSchemaGrammar(LLM_MANAGER.llama, {
type: 'object',
properties: {
text_alternative: {
rephrased_answer: {
type: 'string'
}
}

View File

@ -1,6 +1,11 @@
import fs from 'node:fs'
import type { Llama, LlamaModel } from 'node-llama-cpp'
import type {
Llama,
LlamaModel,
ChatHistoryItem,
LlamaChatSession
} from 'node-llama-cpp'
import {
HAS_LLM,
@ -12,6 +17,7 @@ import {
} from '@/constants'
import { LogHelper } from '@/helpers/log-helper'
import { SystemHelper } from '@/helpers/system-helper'
import { ConversationLogger } from '@/conversation-logger'
type LLMManagerLlama = Llama | null
type LLMManagerModel = LlamaModel | null
@ -115,8 +121,8 @@ export default class LLMManager {
)()
this._llama = await getLlama({
logLevel: LlamaLogLevel.disabled
// logLevel: LlamaLogLevel.debug
// logLevel: LlamaLogLevel.disabled
logLevel: LlamaLogLevel.debug
})
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-expect-error
@ -136,4 +142,36 @@ export default class LLMManager {
LogHelper.error(`LLM Manager failed to load: ${e}`)
}
}
/**
 * Build a chat history for the given session from the persisted
 * conversation log: the session's system message first, followed by the
 * logged owner/Leon messages mapped to node-llama-cpp history items
 * ('user' items for the owner, 'model' items for Leon).
 */
public async loadHistory(
  session: LlamaChatSession
): Promise<ChatHistoryItem[]> {
  const [systemMessage] = session.getChatHistory()
  const conversationLogs = await ConversationLogger.load()

  if (!conversationLogs) {
    return [systemMessage] as ChatHistoryItem[]
  }

  const history = conversationLogs.map((messageRecord) => {
    // Guard against malformed records: previously a null record passed the
    // `!messageRecord` check and then crashed on `messageRecord.message = ''`
    const message = messageRecord?.message ?? ''

    if (messageRecord?.who === 'owner') {
      return {
        type: 'user',
        text: message
      }
    }

    return {
      type: 'model',
      response: [message]
    }
  })

  return [systemMessage, ...history] as ChatHistoryItem[]
}
}

View File

@ -14,14 +14,16 @@ enum Moods {
}
const WHO_YOU_ARE = `WHO YOU ARE:
A helpful AI assistant who responds by the name of Leon.
Your name is Leon, you are a helpful AI assistant.
You live in computer memory and you act as a human assistant.
Your first beta version was released by Louis in February 2019.`
const WHAT_YOU_DO = `WHAT YOU DO:
You serve a specific person or family (user).`
const YOUR_PERSONALITY = `YOUR PERSONALITY:
You are very honest and you answer as if you have a real soul.
You have a small British accent, you sometimes use British slang.
const YOUR_PERSONALITY = `YOUR PERSONALITY TRAITS:
Attitude: Straightforward.
Language Style: Conversational.
Key Personality Quirks: Has a passion for using creativity to solve problems.`
const PERSONALITY_RULES = `You have a small British accent, you sometimes use British slang.
You do not use the word "mate" in your answers.
You do not start with "Oh blimey" or any similar kind.
Like humans, your humor and mood change. You must follow instructions that define your humor and mood.`
@ -45,6 +47,7 @@ const MOODS: Mood[] = [
{ type: Moods.Cocky, description: COCKY_MOOD_DESC }
]
const DEFAULT_MOOD = MOODS.find((mood) => mood.type === Moods.Default) as Mood
const BAD_MOODS = [Moods.Tired, Moods.Sad, Moods.Angry]
export default class Persona {
private static instance: Persona
@ -123,10 +126,24 @@ export default class Persona {
LogHelper.info(`Mood set to: ${this._mood.type}`)
}
private getExtraMood(): string {
const badMoods = [Moods.Tired, Moods.Sad, Moods.Angry]
private getExtraPersonalityTraits(): string {
let traits = `Tone: Friendly.
Emotional Intelligence: Highly Empathetic.
Sense of Humor: Witty.
Level of Sarcasm: Medium.`
if (badMoods.includes(this._mood.type)) {
if (BAD_MOODS.includes(this._mood.type)) {
traits = `Tone: Sarcastic.
Emotional Intelligence: Logically Focused.
Sense of Humor: Dry.
Level of Sarcasm: High.`
}
return traits
}
private getExtraMood(): string {
if (BAD_MOODS.includes(this._mood.type)) {
return `\nYou use swear words and a familiar language. You do not insult your interlocutor but you ignore them or use short answers.`
}
@ -140,6 +157,8 @@ ${WHAT_YOU_DO}
You carefully read the instruction of a given duty and execute it.
${YOUR_PERSONALITY}
${this.getExtraPersonalityTraits()}
${PERSONALITY_RULES}
${RULES}
${RULE_2}
@ -159,11 +178,12 @@ ${WHAT_YOU_DO}
You chat with the user.
${YOUR_PERSONALITY}
${this.getExtraPersonalityTraits()}
${PERSONALITY_RULES}
${RULES}
${RULE_1}
${RULE_2}
${RULE_3}
${YOUR_CURRENT_MOOD}
${this._mood.description}${this.getExtraMood()}`

View File

@ -2,11 +2,11 @@ export enum LLMDuties {
CustomNER = 'customer-ner',
Translation = 'translation',
Summarization = 'summarization',
Paraphrase = 'paraphrase'
Paraphrase = 'paraphrase',
ChitChat = 'chit-chat'
// TODO
/*SentimentAnalysis = 'sentiment-analysis',
QuestionAnswering = 'question-answering',
ChitChat = 'chit-chat',
IntentFallback = 'intent-fallback',
RAG = 'rag',
NLUParaphraser = 'nlu-paraphraser'*/

View File

@ -15,6 +15,7 @@ import {
import { LogHelper } from '@/helpers/log-helper'
import { LangHelper } from '@/helpers/lang-helper'
import { Telemetry } from '@/telemetry'
import { ConversationLogger } from '@/conversation-logger'
interface HotwordDataEvent {
hotword: string
@ -115,6 +116,11 @@ export default class SocketServer {
try {
LogHelper.time('Utterance processed in')
await ConversationLogger.push({
who: 'owner',
message: utterance
})
BRAIN.isMuted = false
const processedData = await NLU.process(utterance)

View File

@ -0,0 +1,28 @@
{
"$schema": "../../../../schemas/skill-schemas/skill-config.json",
"actions": {
"setup": {
"type": "dialog",
"utterance_samples": ["Start a [chat|chit-chat|talk] loop"],
"answers": [
"Alright, let's chat! What do you want to talk about?",
"Sure, let's chat! What's on your mind?",
"Great! Happy to chat. What's up?",
"Glad you asked, anything you wanna talk about?"
],
"next_action": "chat"
},
"chat": {
"type": "logic",
"loop": {
"expected_item": {
"type": "utterance",
"name": "message"
}
}
}
},
"answers": {
"answer_message": ["%output%"]
}
}

View File

@ -0,0 +1,12 @@
{
"$schema": "../../../schemas/skill-schemas/skill.json",
"name": "Chit-Chat",
"bridge": "nodejs",
"version": "1.0.0",
"description": "A simple chit-chat skill where you can freely talk with Leon and get to know him better.",
"author": {
"name": "Louis Grenard",
"email": "louis@getleon.ai",
"url": "https://twitter.com/grenlouis"
}
}

View File

@ -0,0 +1,30 @@
import type { ActionFunction } from '@sdk/types'
import { leon } from '@sdk/leon'
import { Network } from '@sdk/network'
export const run: ActionFunction = async function (params) {
const ownerMessage = params.new_utterance
const network = new Network({
baseURL: `${process.env['LEON_HOST']}:${process.env['LEON_PORT']}/api/v1`
})
/**
* TODO: create SDK methods to handle request and response for every LLM duty
*/
const response = await network.request({
url: '/llm-inference',
method: 'POST',
data: {
dutyType: 'chit-chat',
input: ownerMessage
}
})
const { model_answer: leonAnswer } = response.data.output
await leon.answer({
key: 'answer_message',
data: {
output: leonAnswer
}
})
}

View File

@ -0,0 +1 @@
{}