Mirror of https://github.com/leon-ai/leon.git (synced 2024-12-03 02:45:21 +03:00)
fix(server): update duties system prompt on mood and context info set
This commit is contained in:
parent 822e20f80b
commit db33126664
@@ -1,3 +1,5 @@
+import { EventEmitter } from 'node:events'
+
 import {
   HOST,
   PORT,
@@ -41,6 +43,8 @@ export const PYTHON_TCP_CLIENT = new TCPClient(
   PYTHON_TCP_SERVER_PORT
 )
 
+export const EVENT_EMITTER = new EventEmitter()
+
 /**
  * Register core singletons
  */
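Note: the new `EVENT_EMITTER` singleton is the backbone of this change. `Persona` emits events on it and the LLM duties subscribe to rebuild their sessions. A minimal sketch of the pattern; the duty wiring in the comments is illustrative, not code from this commit:

```ts
import { EventEmitter } from 'node:events'

// Shared emitter: lets Persona signal the duties without importing them,
// avoiding circular dependencies between core modules
export const EVENT_EMITTER = new EventEmitter()

// Consumer side (an LLM duty) rebuilds its session on the signal
EVENT_EMITTER.on('persona_new-mood-set', async () => {
  // e.g. await someDuty.init({ force: true })  (someDuty is illustrative)
})

// Producer side (Persona) fires after refreshing the mood
EVENT_EMITTER.emit('persona_new-mood-set')
```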
@@ -2,8 +2,10 @@ import type { LlamaChatSession } from 'node-llama-cpp'
 
 import {
   type LLMDutyParams,
+  type LLMDutyInitParams,
   type LLMDutyResult,
-  LLMDuty
+  LLMDuty,
+  DEFAULT_INIT_PARAMS
 } from '@/core/llm-manager/llm-duty'
 import { LogHelper } from '@/helpers/log-helper'
 import { CONVERSATION_LOGGER, LLM_MANAGER, LLM_PROVIDER } from '@/core'
@@ -60,17 +62,39 @@ RESPONSE GUIDELINES:
 * If the utterance does not match any of the intents, respond with { "${JSON_KEY_RESPONSE}": "not_found" }. Do not make up new intents by yourself.`
   }
 
-  public async init(): Promise<void> {
+  public async init(
+    params: LLMDutyInitParams = DEFAULT_INIT_PARAMS
+  ): Promise<void> {
     if (LLM_PROVIDER_NAME === LLMProviders.Local) {
-      if (!ActionRecognitionLLMDuty.session) {
-        const { LlamaChatSession } = await Function(
-          'return import("node-llama-cpp")'
-        )()
-
-        ActionRecognitionLLMDuty.session = new LlamaChatSession({
-          contextSequence: LLM_MANAGER.context.getSequence(),
-          systemPrompt: this.systemPrompt
-        }) as LlamaChatSession
+      if (!ActionRecognitionLLMDuty.session || params.force) {
+        LogHelper.title(this.name)
+        LogHelper.info('Initializing...')
+
+        try {
+          const { LlamaChatSession } = await Function(
+            'return import("node-llama-cpp")'
+          )()
+
+          /**
+           * Dispose the previous session and sequence
+           * to give space for the new one
+           */
+          if (params.force) {
+            ActionRecognitionLLMDuty.session.dispose({ disposeSequence: true })
+            LogHelper.info('Session disposed')
+          }
+
+          ActionRecognitionLLMDuty.session = new LlamaChatSession({
+            contextSequence: LLM_MANAGER.context.getSequence(),
+            autoDisposeSequence: true,
+            systemPrompt: this.systemPrompt
+          }) as LlamaChatSession
+
+          LogHelper.success('Initialized')
+        } catch (e) {
+          LogHelper.title(this.name)
+          LogHelper.error(`Failed to initialize: ${e}`)
+        }
       }
     }
   }
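Note: this force-and-dispose init pattern recurs in `CustomNERLLMDuty` and `ParaphraseLLMDuty` further down. A condensed sketch of the lifecycle, with `ExampleDuty` and its prompt as hypothetical stand-ins (the `Function('return import(...)')` trick is presumably there to load the ESM-only node-llama-cpp package without the dynamic import being transpiled away):

```ts
import type { LlamaChatSession } from 'node-llama-cpp'
import { LLM_MANAGER } from '@/core'

class ExampleDuty {
  private static session: LlamaChatSession | undefined

  public async init({ force = false } = {}): Promise<void> {
    // Reuse the existing session unless a rebuild is forced
    if (ExampleDuty.session && !force) {
      return
    }

    const { LlamaChatSession } = await Function(
      'return import("node-llama-cpp")'
    )()

    // Free the old sequence first, otherwise the shared context
    // eventually runs out of sequences to hand out
    if (force && ExampleDuty.session) {
      ExampleDuty.session.dispose({ disposeSequence: true })
    }

    ExampleDuty.session = new LlamaChatSession({
      contextSequence: LLM_MANAGER.context.getSequence(),
      autoDisposeSequence: true,
      systemPrompt: 'example system prompt'
    })
  }
}
```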
@@ -4,8 +4,10 @@ import type { MessageLog } from '@/types'
 import {
   type LLMDutyParams,
   type LLMDutyResult,
+  type LLMDutyInitParams,
   type LLMDutyExecuteParams,
   LLMDuty,
+  DEFAULT_INIT_PARAMS,
   DEFAULT_EXECUTE_PARAMS
 } from '@/core/llm-manager/llm-duty'
 import { LogHelper } from '@/helpers/log-helper'
@@ -16,28 +18,21 @@ import {
   LOOP_CONVERSATION_LOGGER,
   CONVERSATION_LOGGER,
   LLM_PROVIDER,
-  SOCKET_SERVER
+  SOCKET_SERVER,
+  EVENT_EMITTER
 } from '@/core'
 import { LLM_THREADS } from '@/core/llm-manager/llm-manager'
 import { LLMProviders, LLMDuties } from '@/core/llm-manager/types'
 import { LLM_PROVIDER as LLM_PROVIDER_NAME } from '@/constants'
 import { StringHelper } from '@/helpers/string-helper'
 
-interface InitParams {
-  /**
-   * Whether to use the loop history which is erased when Leon's instance is restarted.
-   * If set to false, the main conversation history will be used
-   */
-  useLoopHistory?: boolean
-}
-
 export class ConversationLLMDuty extends LLMDuty {
   private static instance: ConversationLLMDuty
   private static context: LlamaContext = null as unknown as LlamaContext
   private static session: LlamaChatSession = null as unknown as LlamaChatSession
   private static messagesHistoryForNonLocalProvider: MessageLog[] =
     null as unknown as MessageLog[]
-  protected systemPrompt = ``
+  protected systemPrompt = ''
   protected readonly name = 'Conversation LLM Duty'
   protected input: LLMDutyParams['input'] = null
 
@@ -49,71 +44,104 @@ export class ConversationLLMDuty extends LLMDuty {
       LogHelper.success('New instance')
 
       ConversationLLMDuty.instance = this
+
+      EVENT_EMITTER.on('persona_new-mood-set', async () => {
+        await this.init({ force: true })
+      })
+      EVENT_EMITTER.on('persona_new-info-set', async () => {
+        await this.init({ force: true })
+      })
     }
   }
 
-  public async init(params: InitParams = {}): Promise<void> {
-    params.useLoopHistory = params.useLoopHistory ?? true
-
-    if (LLM_PROVIDER_NAME === LLMProviders.Local) {
-      /**
-       * A new context and session will be created only
-       * when Leon's instance is restarted
-       */
-      if (!ConversationLLMDuty.context || !ConversationLLMDuty.session) {
-        await LOOP_CONVERSATION_LOGGER.clear()
-
-        ConversationLLMDuty.context = await LLM_MANAGER.model.createContext({
-          threads: LLM_THREADS
-        })
-
-        const { LlamaChatSession } = await Function(
-          'return import("node-llama-cpp")'
-        )()
-
-        this.systemPrompt = PERSONA.getConversationSystemPrompt()
-
-        ConversationLLMDuty.session = new LlamaChatSession({
-          contextSequence: ConversationLLMDuty.context.getSequence(),
-          systemPrompt: this.systemPrompt
-        }) as LlamaChatSession
-      } else {
-        let conversationLogger = LOOP_CONVERSATION_LOGGER
-
-        if (!params.useLoopHistory) {
-          conversationLogger = CONVERSATION_LOGGER
-        }
-
-        /**
-         * As long as Leon's instance has not been restarted,
-         * the context, session with history will be loaded
-         */
-        const history = await LLM_MANAGER.loadHistory(
-          conversationLogger,
-          ConversationLLMDuty.session
-        )
-
-        ConversationLLMDuty.session.setChatHistory(history)
-      }
-    } else {
-      /**
-       * For non-local providers:
-       * Once Leon's instance is restarted, clean up the messages history,
-       * then load the messages history
-       */
-
-      if (!ConversationLLMDuty.messagesHistoryForNonLocalProvider) {
-        await LOOP_CONVERSATION_LOGGER.clear()
-      }
-
-      let conversationLogger = LOOP_CONVERSATION_LOGGER
-
-      if (!params.useLoopHistory) {
-        conversationLogger = CONVERSATION_LOGGER
-      }
-
-      ConversationLLMDuty.messagesHistoryForNonLocalProvider =
-        await conversationLogger.load()
-    }
-  }
+  public async init(
+    params: LLMDutyInitParams = DEFAULT_INIT_PARAMS
+  ): Promise<void> {
+    LogHelper.title(this.name)
+    LogHelper.info('Initializing...')
+
+    try {
+      params.useLoopHistory = params.useLoopHistory ?? true
+
+      if (LLM_PROVIDER_NAME === LLMProviders.Local) {
+        /**
+         * A new context and session will be created only
+         * when Leon's instance is restarted or when it is forced
+         */
+        if (
+          !ConversationLLMDuty.context ||
+          !ConversationLLMDuty.session ||
+          params.force
+        ) {
+          await LOOP_CONVERSATION_LOGGER.clear()
+
+          if (params.force) {
+            if (ConversationLLMDuty.context) {
+              await ConversationLLMDuty.context.dispose()
+            }
+            if (ConversationLLMDuty.session) {
+              ConversationLLMDuty.session.dispose({ disposeSequence: true })
+            }
+          }
+
+          ConversationLLMDuty.context = await LLM_MANAGER.model.createContext({
+            threads: LLM_THREADS
+          })
+
+          const { LlamaChatSession } = await Function(
+            'return import("node-llama-cpp")'
+          )()
+
+          this.systemPrompt = PERSONA.getConversationSystemPrompt()
+
+          ConversationLLMDuty.session = new LlamaChatSession({
+            contextSequence: ConversationLLMDuty.context.getSequence(),
+            autoDisposeSequence: true,
+            systemPrompt: this.systemPrompt
+          }) as LlamaChatSession
+        } else {
+          let conversationLogger = LOOP_CONVERSATION_LOGGER
+
+          if (!params.useLoopHistory) {
+            conversationLogger = CONVERSATION_LOGGER
+          }
+
+          /**
+           * As long as Leon's instance has not been restarted,
+           * the context, session with history will be loaded
+           */
+          const history = await LLM_MANAGER.loadHistory(
+            conversationLogger,
+            ConversationLLMDuty.session
+          )
+
+          ConversationLLMDuty.session.setChatHistory(history)
+        }
+      } else {
+        /**
+         * For non-local providers:
+         * Once Leon's instance is restarted, clean up the messages history,
+         * then load the messages history
+         */
+
+        if (!ConversationLLMDuty.messagesHistoryForNonLocalProvider) {
+          await LOOP_CONVERSATION_LOGGER.clear()
+        }
+
+        let conversationLogger = LOOP_CONVERSATION_LOGGER
+
+        if (!params.useLoopHistory) {
+          conversationLogger = CONVERSATION_LOGGER
+        }
+
+        ConversationLLMDuty.messagesHistoryForNonLocalProvider =
+          await conversationLogger.load()
+      }
+
+      LogHelper.success('Initialized')
+    } catch (e) {
+      LogHelper.title(this.name)
+      LogHelper.error(`Failed to initialize: ${e}`)
+    }
+  }
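Note: subscribing inside the singleton constructor guarantees the listeners are registered exactly once per process. A sketch of the consumer side; the `refreshOnPersonaChange` helper and `DutyLike` shape are hypothetical, not part of the commit:

```ts
import { EVENT_EMITTER } from '@/core'

interface DutyLike {
  init(params?: { force?: boolean }): Promise<void>
}

// Hypothetical helper: rebuild a duty's session whenever the persona
// mood or context info changes, so completions stop using a stale prompt
export function refreshOnPersonaChange(duty: DutyLike): void {
  for (const event of ['persona_new-mood-set', 'persona_new-info-set']) {
    EVENT_EMITTER.on(event, async () => {
      await duty.init({ force: true })
    })
  }
}
```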
@@ -2,8 +2,10 @@ import type { LlamaChatSession } from 'node-llama-cpp'
 
 import {
   type LLMDutyParams,
+  type LLMDutyInitParams,
   type LLMDutyResult,
-  LLMDuty
+  LLMDuty,
+  DEFAULT_INIT_PARAMS
 } from '@/core/llm-manager/llm-duty'
 import { LogHelper } from '@/helpers/log-helper'
 import { LLM_MANAGER, LLM_PROVIDER } from '@/core'
@@ -41,17 +43,39 @@ export class CustomNERLLMDuty<T> extends LLMDuty {
     this.data = params.data
   }
 
-  public async init(): Promise<void> {
+  public async init(
+    params: LLMDutyInitParams = DEFAULT_INIT_PARAMS
+  ): Promise<void> {
     if (LLM_PROVIDER_NAME === LLMProviders.Local) {
-      if (!CustomNERLLMDuty.session) {
-        const { LlamaChatSession } = await Function(
-          'return import("node-llama-cpp")'
-        )()
-
-        CustomNERLLMDuty.session = new LlamaChatSession({
-          contextSequence: LLM_MANAGER.context.getSequence(),
-          systemPrompt: this.systemPrompt
-        })
+      if (!CustomNERLLMDuty.session || params.force) {
+        LogHelper.title(this.name)
+        LogHelper.info('Initializing...')
+
+        try {
+          const { LlamaChatSession } = await Function(
+            'return import("node-llama-cpp")'
+          )()
+
+          /**
+           * Dispose the previous session and sequence
+           * to give space for the new one
+           */
+          if (params.force) {
+            CustomNERLLMDuty.session.dispose({ disposeSequence: true })
+            LogHelper.info('Session disposed')
+          }
+
+          CustomNERLLMDuty.session = new LlamaChatSession({
+            contextSequence: LLM_MANAGER.context.getSequence(),
+            autoDisposeSequence: true,
+            systemPrompt: this.systemPrompt
+          })
+
+          LogHelper.success('Initialized')
+        } catch (e) {
+          LogHelper.title(this.name)
+          LogHelper.error(`Failed to initialize: ${e}`)
+        }
      }
    }
  }
@@ -3,12 +3,20 @@ import type { LlamaChatSession } from 'node-llama-cpp'
 import {
   type LLMDutyParams,
   type LLMDutyResult,
+  type LLMDutyInitParams,
   type LLMDutyExecuteParams,
   LLMDuty,
+  DEFAULT_INIT_PARAMS,
   DEFAULT_EXECUTE_PARAMS
 } from '@/core/llm-manager/llm-duty'
 import { LogHelper } from '@/helpers/log-helper'
-import { LLM_MANAGER, LLM_PROVIDER, PERSONA, SOCKET_SERVER } from '@/core'
+import {
+  EVENT_EMITTER,
+  LLM_MANAGER,
+  LLM_PROVIDER,
+  PERSONA,
+  SOCKET_SERVER
+} from '@/core'
 import { LLMProviders, LLMDuties } from '@/core/llm-manager/types'
 import { LLM_PROVIDER as LLM_PROVIDER_NAME } from '@/constants'
 import { StringHelper } from '@/helpers/string-helper'
@@ -18,6 +26,7 @@ interface ParaphraseLLMDutyParams extends LLMDutyParams {}
 export class ParaphraseLLMDuty extends LLMDuty {
   private static instance: ParaphraseLLMDuty
   private static session: LlamaChatSession = null as unknown as LlamaChatSession
+  protected static finalSystemPrompt = ''
   protected systemPrompt = `You are an AI system that generates answers (Natural Language Generation).
 You must provide a text alternative according to your current mood and your personality.
 Never indicate that it's a modified version.
@@ -43,24 +52,55 @@ The sun is a star, it is the closest star to Earth.`
       LogHelper.success('New instance')
 
       ParaphraseLLMDuty.instance = this
+
+      EVENT_EMITTER.on('persona_new-mood-set', async () => {
+        await this.init({ force: true })
+      })
+      EVENT_EMITTER.on('persona_new-info-set', async () => {
+        await this.init({ force: true })
+      })
     }
 
     this.input = params.input
   }
 
-  public async init(): Promise<void> {
+  public async init(
+    params: LLMDutyInitParams = DEFAULT_INIT_PARAMS
+  ): Promise<void> {
     if (LLM_PROVIDER_NAME === LLMProviders.Local) {
-      if (!ParaphraseLLMDuty.session) {
-        const { LlamaChatSession } = await Function(
-          'return import("node-llama-cpp")'
-        )()
+      if (!ParaphraseLLMDuty.session || params.force) {
+        LogHelper.title(this.name)
+        LogHelper.info('Initializing...')
 
-        this.systemPrompt = PERSONA.getDutySystemPrompt(this.systemPrompt)
+        try {
+          const { LlamaChatSession } = await Function(
+            'return import("node-llama-cpp")'
+          )()
 
-        ParaphraseLLMDuty.session = new LlamaChatSession({
-          contextSequence: LLM_MANAGER.context.getSequence(),
-          systemPrompt: this.systemPrompt
-        }) as LlamaChatSession
+          /**
+           * Dispose the previous session and sequence
+           * to give space for the new one
+           */
+          if (params.force) {
+            ParaphraseLLMDuty.session.dispose({ disposeSequence: true })
+            LogHelper.info('Session disposed')
+          }
+
+          ParaphraseLLMDuty.finalSystemPrompt = PERSONA.getDutySystemPrompt(
+            this.systemPrompt
+          )
+
+          ParaphraseLLMDuty.session = new LlamaChatSession({
+            contextSequence: LLM_MANAGER.context.getSequence(),
+            autoDisposeSequence: true,
+            systemPrompt: ParaphraseLLMDuty.finalSystemPrompt
+          }) as LlamaChatSession
+
+          LogHelper.success('Initialized')
+        } catch (e) {
+          LogHelper.title(this.name)
+          LogHelper.error(`Failed to initialize: ${e}`)
+        }
       }
     }
   }
@@ -75,7 +115,7 @@ The sun is a star, it is the closest star to Earth.`
     const prompt = `Modify the following text but do not say you modified it: ${this.input}`
     const completionParams = {
       dutyType: LLMDuties.Paraphrase,
-      systemPrompt: this.systemPrompt,
+      systemPrompt: ParaphraseLLMDuty.finalSystemPrompt,
       temperature: 0.8
     }
    let completionResult
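Note: besides the event wiring, this file changes where the decorated prompt lives. The session is static while duty instances are created per use, so `execute()` reading `this.systemPrompt` could presumably diverge from the persona-decorated prompt baked into the session; caching it in the static `finalSystemPrompt` keeps the two in sync. A reduced, hypothetical sketch of that idea (`Session` and `decorate` are stand-ins, not Leon APIs):

```ts
type Session = { systemPrompt: string }

class ExampleParaphraseDuty {
  // Static, like the llama session it must stay in sync with:
  // per-use instances come and go, the session does not
  private static finalSystemPrompt = ''
  private static session: Session | undefined

  private readonly basePrompt = 'Base duty prompt'

  public init(decorate: (prompt: string) => string): void {
    ExampleParaphraseDuty.finalSystemPrompt = decorate(this.basePrompt)
    ExampleParaphraseDuty.session = {
      systemPrompt: ExampleParaphraseDuty.finalSystemPrompt
    }
  }

  public execute(): string {
    // Read the cached decorated prompt, not instance state, so the
    // completion request matches what the session was built with
    return ExampleParaphraseDuty.finalSystemPrompt
  }
}
```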
@@ -14,6 +14,17 @@
  */
 import { LLMDuties } from '@/core/llm-manager/types'
 
+export interface LLMDutyInitParams {
+  /**
+   * Whether to use the loop history which is erased when Leon's instance is restarted.
+   * If set to false, the main conversation history will be used
+   */
+  useLoopHistory?: boolean
+  /**
+   * Force duty reinitialization
+   */
+  force?: boolean
+}
 export interface LLMDutyExecuteParams {
   isWarmingUp?: boolean
 }
@@ -30,16 +41,20 @@ export interface LLMDutyResult {
   data: Record<string, unknown>
 }
 
+export const DEFAULT_INIT_PARAMS: LLMDutyInitParams = {
+  useLoopHistory: true,
+  force: false
+}
 export const DEFAULT_EXECUTE_PARAMS: LLMDutyExecuteParams = {
   isWarmingUp: false
 }
 
 export abstract class LLMDuty {
   protected abstract readonly name: string
-  protected abstract readonly systemPrompt: LLMDutyParams['systemPrompt']
+  protected abstract systemPrompt: LLMDutyParams['systemPrompt']
   protected abstract input: LLMDutyParams['input']
 
-  protected abstract init(): Promise<void>
+  protected abstract init(params: LLMDutyInitParams): Promise<void>
   protected abstract execute(
     params: LLMDutyExecuteParams
   ): Promise<LLMDutyResult | null>
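Note: one subtlety with the `DEFAULT_INIT_PARAMS` default: a TypeScript default parameter applies only when the argument is omitted entirely. A call like `init({ force: true })` leaves `useLoopHistory` undefined, which is why `ConversationLLMDuty` still guards with `??`. A small sketch of the semantics:

```ts
interface LLMDutyInitParams {
  useLoopHistory?: boolean
  force?: boolean
}

const DEFAULT_INIT_PARAMS: LLMDutyInitParams = {
  useLoopHistory: true,
  force: false
}

async function init(
  params: LLMDutyInitParams = DEFAULT_INIT_PARAMS
): Promise<void> {
  // init() -> params.useLoopHistory === true (default object used)
  // init({ force: true }) -> params.useLoopHistory === undefined,
  // so the nullish fallback below is still required
  params.useLoopHistory = params.useLoopHistory ?? true
}
```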
@@ -1,4 +1,4 @@
-import { SOCKET_SERVER } from '@/core'
+import { EVENT_EMITTER, SOCKET_SERVER } from '@/core'
 import { LogHelper } from '@/helpers/log-helper'
 import { StringHelper } from '@/helpers/string-helper'
 import { DateHelper } from '@/helpers/date-helper'
@@ -92,16 +92,15 @@ export default class Persona {
     this.setMood()
     setInterval(() => {
       this.setMood()
+      EVENT_EMITTER.emit('persona_new-mood-set')
     }, 60_000 * 60)
 
     this.setContextInfo()
-    setInterval(() => {
-      this.setContextInfo()
-    }, 60_000 * 5)
-
     this.setOwnerInfo()
     setInterval(() => {
+      this.setContextInfo()
       this.setOwnerInfo()
+      EVENT_EMITTER.emit('persona_new-info-set')
     }, 60_000 * 5)
   }
 }
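Note: end to end, the flow is: the `Persona` intervals refresh the mood (hourly) and the context and owner info (every five minutes), emit an event after each refresh, and every subscribed duty force-reinitializes so its next completion uses the freshly decorated system prompt. A compact sketch of the producer side; the `persona` parameter shape is assumed for illustration:

```ts
import { EVENT_EMITTER } from '@/core'

function startPersonaRefresh(persona: {
  setMood(): void
  setContextInfo(): void
  setOwnerInfo(): void
}): void {
  setInterval(() => {
    persona.setMood()
    // Duties listening on this event rebuild their llama sessions
    EVENT_EMITTER.emit('persona_new-mood-set')
  }, 60_000 * 60) // hourly

  setInterval(() => {
    persona.setContextInfo()
    persona.setOwnerInfo()
    EVENT_EMITTER.emit('persona_new-info-set')
  }, 60_000 * 5) // every five minutes
}
```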