mirror of https://github.com/leon-ai/leon.git synced 2024-10-26 18:18:46 +03:00

style: fix linting

Divlo committed on 2023-04-02 19:18:15 +02:00
parent 070f0e1469
commit c49f931da4
No known key found for this signature in database
GPG Key ID: 8F9478F220CE65E9
28 changed files with 299 additions and 168 deletions

@@ -26,7 +26,6 @@
   "plugins": ["@typescript-eslint", "unicorn", "import"],
   "ignorePatterns": "*.spec.js",
   "rules": {
-    "quotes": ["error", "single"],
     "@typescript-eslint/no-non-null-assertion": ["off"],
     "no-async-promise-executor": ["off"],
     "no-underscore-dangle": ["error", { "allowAfterThis": true }],

.gitignore
@@ -26,6 +26,7 @@ tcp_server/src/Pipfile.lock
 !tcp_server/**/.gitkeep
 !bridges/python/**/.gitkeep
 !**/*.sample*
+server/src/core/http-server/old-server.js
 packages/**/config/config.json
 skills/**/src/config.json
 packages/**/data/db/*.json

@@ -3,9 +3,7 @@
     {
       "method": "POST",
       "route": "/api/action/games/akinator/choose_thematic",
-      "params": [
-        "thematic"
-      ],
+      "params": ["thematic"],
       "entitiesType": "trim"
     },
     {
@@ -46,9 +44,7 @@
     {
       "method": "POST",
       "route": "/api/action/games/rochambeau/play",
-      "params": [
-        "handsign"
-      ],
+      "params": ["handsign"],
       "entitiesType": "trim"
     },
     {
@@ -124,10 +120,7 @@
     {
       "method": "POST",
       "route": "/api/action/news/github_trends/run",
-      "params": [
-        "number",
-        "daterange"
-      ],
+      "params": ["number", "daterange"],
       "entitiesType": "builtIn"
     },
     {
@@ -138,9 +131,7 @@
     {
       "method": "POST",
       "route": "/api/action/productivity/todo_list/create_list",
-      "params": [
-        "list"
-      ],
+      "params": ["list"],
       "entitiesType": "trim"
     },
     {
@@ -151,53 +142,37 @@
     {
       "method": "POST",
       "route": "/api/action/productivity/todo_list/view_list",
-      "params": [
-        "list"
-      ],
+      "params": ["list"],
       "entitiesType": "trim"
     },
     {
       "method": "POST",
       "route": "/api/action/productivity/todo_list/rename_list",
-      "params": [
-        "old_list",
-        "new_list"
-      ],
+      "params": ["old_list", "new_list"],
       "entitiesType": "trim"
     },
     {
       "method": "POST",
       "route": "/api/action/productivity/todo_list/delete_list",
-      "params": [
-        "list"
-      ],
+      "params": ["list"],
       "entitiesType": "trim"
     },
     {
       "method": "POST",
       "route": "/api/action/productivity/todo_list/add_todos",
-      "params": [
-        "todos",
-        "list"
-      ],
+      "params": ["todos", "list"],
       "entitiesType": "trim"
     },
     {
       "method": "POST",
       "route": "/api/action/productivity/todo_list/complete_todos",
-      "params": [
-        "todos",
-        "list"
-      ],
+      "params": ["todos", "list"],
       "entitiesType": "trim"
     },
     {
       "method": "POST",
       "route": "/api/action/productivity/todo_list/uncheck_todos",
-      "params": [
-        "todos",
-        "list"
-      ],
+      "params": ["todos", "list"],
       "entitiesType": "trim"
     },
     {
@@ -218,9 +193,7 @@
     {
       "method": "POST",
       "route": "/api/action/utilities/is_it_down/run",
-      "params": [
-        "url"
-      ],
+      "params": ["url"],
       "entitiesType": "builtIn"
     },
     {
@@ -234,4 +207,4 @@
       "params": []
     }
   ]
 }

@@ -30,7 +30,7 @@ dotenv.config()
 ;(async () => {
   try {
     const nodeMinRequiredVersion = '16'
-    const npmMinRequiredVersion = '5'
+    const npmMinRequiredVersion = '8'
     const minimumRequiredRAM = 4
     const flitePath = 'bin/flite/flite'
     const coquiLanguageModelPath = 'bin/coqui/huge-vocabulary.scorer'
@@ -215,7 +215,9 @@ dotenv.config()
       const p = await command(
        `${PYTHON_BRIDGE_BIN_PATH} "${path.join(
           process.cwd(),
-          'scripts', 'assets', 'intent-object.json'
+          'scripts',
+          'assets',
+          'intent-object.json'
         )}"`,
         { shell: true }
       )

@@ -141,7 +141,7 @@ SPACY_MODELS.set('fr', {
    }

    try {
-      await command(`pipenv install --verbose --site-packages`, {
+      await command('pipenv install --verbose --site-packages', {
        shell: true,
        stdio: 'inherit'
      })
@@ -155,21 +155,21 @@ SPACY_MODELS.set('fr', {
      LogHelper.info(
        'Installing Rust installer as it is needed for the "tokenizers" package for macOS ARM64 architecture...'
      )
-      await command(`curl https://sh.rustup.rs -sSf | sh -s -- -y`, {
+      await command('curl https://sh.rustup.rs -sSf | sh -s -- -y', {
        shell: true,
        stdio: 'inherit'
      })
      LogHelper.success('Rust installer installed')

      LogHelper.info('Reloading configuration from "$HOME/.cargo/env"...')
-      await command(`source "$HOME/.cargo/env"`, {
+      await command('source "$HOME/.cargo/env"', {
        shell: true,
        stdio: 'inherit'
      })
      LogHelper.success('Configuration reloaded')

      LogHelper.info('Checking Rust compiler version...')
-      await command(`rustc --version`, {
+      await command('rustc --version', {
        shell: true,
        stdio: 'inherit'
      })

@@ -34,10 +34,10 @@ const TCP_SERVER_VERSION_FILE_PATH = path.join(
 )

 export const [, PYTHON_BRIDGE_VERSION] = fs
   .readFileSync(PYTHON_BRIDGE_VERSION_FILE_PATH, 'utf8')
-  .split('\'')
+  .split("'")
 export const [, TCP_SERVER_VERSION] = fs
   .readFileSync(TCP_SERVER_VERSION_FILE_PATH, 'utf8')
-  .split('\'')
+  .split("'")

 export const PYTHON_BRIDGE_BIN_NAME = 'leon-python-bridge'
 export const TCP_SERVER_BIN_NAME = 'leon-tcp-server'

@@ -10,7 +10,11 @@ import type {
   NLUResult
 } from '@/core/nlp/types'
 import type { SkillConfigSchema } from '@/schemas/skill-schemas'
-import type { BrainProcessResult, IntentObject, SkillResult } from '@/core/brain/types'
+import type {
+  BrainProcessResult,
+  IntentObject,
+  SkillResult
+} from '@/core/brain/types'
 import { SkillActionType, SkillOutputType } from '@/core/brain/types'
 import { langs } from '@@/core/langs.json'
 import { HAS_TTS, PYTHON_BRIDGE_BIN_PATH, TMP_PATH } from '@/constants'
@@ -107,7 +111,11 @@ export default class Brain {
   /**
    * Pickup speech info we need to return
    */
-  public wernicke(type: string, key?: string, obj?: Record<string, unknown>): string {
+  public wernicke(
+    type: string,
+    key?: string,
+    obj?: Record<string, unknown>
+  ): string {
     let answerObject: Record<string, string> = {}
     let answer = ''
@@ -152,7 +160,11 @@
   /**
    * Create the intent object that will be passed to the skill
    */
-  private createIntentObject(nluResult: NLUResult, utteranceId: string, slots: IntentObject['slots']): IntentObject {
+  private createIntentObject(
+    nluResult: NLUResult,
+    utteranceId: string,
+    slots: IntentObject['slots']
+  ): IntentObject {
     return {
       id: utteranceId,
       lang: this._lang,
@@ -171,7 +183,9 @@
   /**
    * Handle the skill process output
    */
-  private handleLogicActionSkillProcessOutput(data: Buffer): Promise<Error | null> | void {
+  private handleLogicActionSkillProcessOutput(
+    data: Buffer
+  ): Promise<Error | null> | void {
     try {
       const obj = JSON.parse(data.toString())
@@ -191,7 +205,11 @@
         return Promise.resolve(null)
       } else {
-        return Promise.reject(new Error(`The "${this.skillFriendlyName}" skill from the "${this.domainFriendlyName}" domain is not well configured. Check the configuration file.`))
+        return Promise.reject(
+          new Error(
+            `The "${this.skillFriendlyName}" skill from the "${this.domainFriendlyName}" domain is not well configured. Check the configuration file.`
+          )
+        )
       }
     } catch (e) {
       LogHelper.title('Brain')
@@ -217,7 +235,10 @@
   /**
    * Handle the skill process error
    */
-  private handleLogicActionSkillProcessError(data: Buffer, intentObjectPath: string): Error {
+  private handleLogicActionSkillProcessError(
+    data: Buffer,
+    intentObjectPath: string
+  ): Error {
     this.speakSkillError()

     Brain.deleteIntentObjFile(intentObjectPath)
@@ -250,7 +271,11 @@
       })
     }

-    const intentObject = this.createIntentObject(nluResult, utteranceId, slots)
+    const intentObject = this.createIntentObject(
+      nluResult,
+      utteranceId,
+      slots
+    )

     try {
       fs.writeFileSync(intentObjectPath, JSON.stringify(intentObject))
@@ -294,8 +319,13 @@
       skillConfigPath,
       classification: { action: actionName }
     } = nluResult
-    const { actions } = SkillDomainHelper.getSkillConfig(skillConfigPath, this._lang)
-    const action = actions[actionName] as SkillConfigSchema['actions'][string]
+    const { actions } = SkillDomainHelper.getSkillConfig(
+      skillConfigPath,
+      this._lang
+    )
+    const action = actions[
+      actionName
+    ] as SkillConfigSchema['actions'][string]
     const { type: actionType } = action
     const nextAction = action.next_action
       ? actions[action.next_action]
@@ -343,7 +373,8 @@
           skillResult = JSON.parse(this.skillOutput)

           if (skillResult?.output.speech) {
-            skillResult.output.speech = skillResult.output.speech.toString()
+            skillResult.output.speech =
+              skillResult.output.speech.toString()
             if (!this.isMuted) {
               this.talk(skillResult.output.speech, true)
             }
@@ -354,7 +385,8 @@
             skillResult.output.type === SkillOutputType.End &&
             skillResult.output.options['synchronization'] &&
             skillResult.output.options['synchronization'].enabled &&
-            skillResult.output.options['synchronization'].enabled === true
+            skillResult.output.options['synchronization'].enabled ===
+              true
           ) {
             const sync = new Synchronizer(
               this,
@@ -373,7 +405,9 @@
           }
         } catch (e) {
           LogHelper.title(`${this.skillFriendlyName} skill`)
-          LogHelper.error(`There is an error on the final output: ${String(e)}`)
+          LogHelper.error(
+            `There is an error on the final output: ${String(e)}`
+          )
           this.speakSkillError()
         }
@@ -395,7 +429,10 @@
         ) {
           SOCKET_SERVER.socket.emit('suggest', nextAction.suggestions)
         }
-        if (action?.suggestions && skillResult?.output.core?.showSuggestions) {
+        if (
+          action?.suggestions &&
+          skillResult?.output.core?.showSuggestions
+        ) {
           SOCKET_SERVER.socket.emit('suggest', action.suggestions)
         }
@@ -426,10 +463,8 @@
         'config',
         this._lang + '.json'
       )
-      const { actions, entities: skillConfigEntities } = SkillDomainHelper.getSkillConfig(
-        configFilePath,
-        this._lang
-      )
+      const { actions, entities: skillConfigEntities } =
+        SkillDomainHelper.getSkillConfig(configFilePath, this._lang)
       const utteranceHasEntities = nluResult.entities.length > 0
       const { answers: rawAnswers } = nluResult
       let answers = rawAnswers
@@ -453,10 +488,14 @@
        // In case the expected answer requires a known entity
        if (answer?.indexOf('{{') !== -1) {
          // TODO
-          const unknownAnswers = actions[nluResult.classification.action]?.unknown_answers
+          const unknownAnswers =
+            actions[nluResult.classification.action]?.unknown_answers

          if (unknownAnswers) {
-            answer = unknownAnswers[Math.floor(Math.random() * unknownAnswers.length)]
+            answer =
+              unknownAnswers[
+                Math.floor(Math.random() * unknownAnswers.length)
+              ]
          }
        }
      } else {
@@ -469,7 +508,8 @@
      if (utteranceHasEntities && answer?.indexOf('{{') !== -1) {
        nluResult.currentEntities.forEach((entityObj) => {
          answer = StringHelper.findAndMap(answer as string, {
-            [`{{ ${entityObj.entity} }}`]: (entityObj as NERCustomEntity).resolution.value
+            [`{{ ${entityObj.entity} }}`]: (entityObj as NERCustomEntity)
+              .resolution.value
          })

          /**
@@ -488,7 +528,8 @@
            if (entity && dataKey && entity === entityObj.entity) {
              const { option } = entityObj as CustomEnumEntity
-              const entityOption = skillConfigEntities[entity]?.options[option]
+              const entityOption =
+                skillConfigEntities[entity]?.options[option]
              const entityOptionData = entityOption?.data
              let valuesArr: string[] = []
@@ -500,7 +541,9 @@
              if (valuesArr.length > 0) {
                answer = StringHelper.findAndMap(answer as string, {
                  [match]:
-                    valuesArr[Math.floor(Math.random() * valuesArr.length)]
+                    valuesArr[
+                      Math.floor(Math.random() * valuesArr.length)
+                    ]
                })
              }
            }

@@ -5,7 +5,8 @@ import type {
   NLPSkill,
   NLPUtterance,
   NLUResolver,
-  NLUResult, NLUSlot,
+  NLUResult,
+  NLUSlot,
   NLUSlots
 } from '@/core/nlp/types'
 import type { SkillConfigSchema } from '@/schemas/skill-schemas'
@@ -62,7 +63,7 @@ export interface IntentObject {
 export interface BrainProcessResult extends NLUResult {
   speeches: string[]
   executionTime: number
-  utteranceId? : string
+  utteranceId?: string
   lang?: ShortLanguageCode
   core?: SkillCoreData | undefined
   action?: SkillConfigSchema['actions'][string]

@@ -81,7 +81,9 @@ export default class Conversation {
     const newContextName = `${domain}.${skillName}`
     const { actions } = SkillDomainHelper.getSkillConfig(skillConfigPath, lang)
     // Grab next action from the NLU data file
-    const { next_action: nextAction } = actions[actionName] as { next_action: string }
+    const { next_action: nextAction } = actions[actionName] as {
+      next_action: string
+    }

     // If slots are required to trigger next actions, then go through the context activation
     if (slotKeys.length > 0) {
@@ -183,7 +185,7 @@
       // If it's the first slot setting grabbed from the model or not
       if (isFirstSet) {
-        [slotName, slotEntity] = key.split('#') as [string, string]
+        ;[slotName, slotEntity] = key.split('#') as [string, string]
         questions = slotObj.locales?.[lang] as string[]
       }
@@ -191,8 +193,9 @@
       const [foundEntity] = entities.filter(
         ({ entity }) => entity === slotEntity && !slotObj.isFilled
       )
-      const pickedQuestion =
-        questions[Math.floor(Math.random() * questions.length)] as string
+      const pickedQuestion = questions[
+        Math.floor(Math.random() * questions.length)
+      ] as string
       const slot = this._activeContext.slots[slotName]
       const newSlot = {
         name: slotName,

@@ -8,6 +8,11 @@ import { LogHelper } from '@/helpers/log-helper'
 import { SkillDomainHelper } from '@/helpers/skill-domain-helper'
 import { DEFAULT_NLU_RESULT } from '@/core/nlp/nlu/nlu'

+interface ResolveResolversResult {
+  name: string
+  value: string
+}
+
 export class ActionLoop {
   /**
    * Handle action loop logic before NLU processing
@@ -43,7 +48,10 @@
       NLU.nluResult
     )

-    const { actions, resolvers } = SkillDomainHelper.getSkillConfig(skillConfigPath, BRAIN.lang)
+    const { actions, resolvers } = SkillDomainHelper.getSkillConfig(
+      skillConfigPath,
+      BRAIN.lang
+    )
     const action = actions[NLU.nluResult.classification.action]
     if (action?.loop) {
       const { name: expectedItemName, type: expectedItemType } =
@@ -67,7 +75,7 @@
       const resolveResolvers = (
         resolver: string,
         intent: string
-      ): [{ name: string, value: string }] => {
+      ): [ResolveResolversResult] => {
         const resolversPath = join(
           process.cwd(),
           'core',
@@ -77,8 +85,10 @@
         )
         // Load the skill resolver or the global resolver
         const resolvedIntents = !intent.includes('resolver.global')
-          ? (resolvers && resolvers[resolver])
-          : JSON.parse(fs.readFileSync(join(resolversPath, `${resolver}.json`), 'utf8'))
+          ? resolvers && resolvers[resolver]
+          : JSON.parse(
+              fs.readFileSync(join(resolversPath, `${resolver}.json`), 'utf8')
+            )

         // E.g. resolver.global.denial -> denial
         intent = intent.substring(intent.lastIndexOf('.') + 1)
@@ -144,8 +154,10 @@
       // Break the action loop and prepare for the next action if necessary
       if (processedData.core?.isInActionLoop === false) {
-        NLU.conversation.activeContext.isInActionLoop = !!processedData.action?.loop
-        NLU.conversation.activeContext.actionName = processedData.action?.next_action as string
+        NLU.conversation.activeContext.isInActionLoop =
+          !!processedData.action?.loop
+        NLU.conversation.activeContext.actionName = processedData.action
+          ?.next_action as string
         NLU.conversation.activeContext.intent = `${processedData.classification?.skill}.${processedData.action?.next_action}`
       }

@@ -51,9 +51,7 @@ export default class ModelLoader {
       this.loadSkillsResolversModel(
         path.join(MODELS_PATH, 'leon-skills-resolvers-model.nlp')
       ),
-      this.loadMainModel(
-        path.join(MODELS_PATH, 'leon-main-model.nlp')
-      )
+      this.loadMainModel(path.join(MODELS_PATH, 'leon-main-model.nlp'))
     ])
   }
@@ -65,7 +63,11 @@
       if (!fs.existsSync(modelPath)) {
         LogHelper.title('Model Loader')
-        reject(new Error('The global resolvers NLP model does not exist, please run: npm run train'))
+        reject(
+          new Error(
+            'The global resolvers NLP model does not exist, please run: npm run train'
+          )
+        )
       } else {
         LogHelper.title('Model Loader')
@@ -84,7 +86,11 @@
           resolve()
         } catch (e) {
-          reject(new Error('An error occurred while loading the global resolvers NLP model'))
+          reject(
+            new Error(
+              'An error occurred while loading the global resolvers NLP model'
+            )
+          )
         }
       }
     })
@@ -120,7 +126,11 @@
           resolve()
         } catch (e) {
-          reject(new Error('An error occurred while loading the skills resolvers NLP model'))
+          reject(
+            new Error(
+              'An error occurred while loading the skills resolvers NLP model'
+            )
+          )
         }
       }
     })
@@ -165,7 +175,9 @@
           resolve()
         } catch (e) {
-          reject(new Error('An error occurred while loading the main NLP model'))
+          reject(
+            new Error('An error occurred while loading the main NLP model')
+          )
         }
       }
     })

@@ -1,5 +1,10 @@
 import type { ShortLanguageCode } from '@/types'
-import type { NEREntity, NERSpacyEntity, NLPUtterance, NLUResult } from '@/core/nlp/types'
+import type {
+  NEREntity,
+  NERSpacyEntity,
+  NLPUtterance,
+  NLUResult
+} from '@/core/nlp/types'
 import type {
   SkillCustomEnumEntityTypeSchema,
   SkillCustomRegexEntityTypeSchema,
@@ -50,7 +55,9 @@ export default class NER {
       LogHelper.success('Entities found:')
       entities.forEach((entity) =>
-        LogHelper.success(`{ value: ${entity.sourceText}, entity: ${entity.entity} }`)
+        LogHelper.success(
+          `{ value: ${entity.sourceText}, entity: ${entity.entity} }`
+        )
       )
     }
@@ -68,8 +75,13 @@
     const { classification } = nluResult
     // Remove end-punctuation and add an end-whitespace
-    const utterance = `${StringHelper.removeEndPunctuation(nluResult.utterance)} `
-    const { actions } = SkillDomainHelper.getSkillConfig(skillConfigPath, lang)
+    const utterance = `${StringHelper.removeEndPunctuation(
+      nluResult.utterance
+    )} `
+    const { actions } = SkillDomainHelper.getSkillConfig(
+      skillConfigPath,
+      lang
+    )
     const { action } = classification
     const promises = []
     const actionEntities = actions[action]?.entities || []
@@ -92,10 +104,11 @@
     await Promise.all(promises)

-    const { entities }: { entities: NEREntity[] } = await this.manager.process({
-      locale: lang,
-      text: utterance
-    })
+    const { entities }: { entities: NEREntity[] } =
+      await this.manager.process({
+        locale: lang,
+        text: utterance
+      })

     // Normalize entities
     entities.map((entity) => {
@@ -148,10 +161,13 @@
    */
   private getSpacyEntities(utterance: NLPUtterance): Promise<NERSpacyEntity[]> {
     return new Promise((resolve) => {
-      const spacyEntitiesReceivedHandler =
-        async ({ spacyEntities }: { spacyEntities: NERSpacyEntity[] }): Promise<void> => {
-          resolve(spacyEntities)
-        }
+      const spacyEntitiesReceivedHandler = async ({
+        spacyEntities
+      }: {
+        spacyEntities: NERSpacyEntity[]
+      }): Promise<void> => {
+        resolve(spacyEntities)
+      }

       TCP_CLIENT.ee.removeAllListeners()
       TCP_CLIENT.ee.on('spacy-entities-received', spacyEntitiesReceivedHandler)
@@ -192,7 +208,11 @@
           options: {}
         }
         this.manager.addRule(lang, entityConfig.name, 'trim', rule)
-        this.manager[conditionMethod](lang, entityConfig.name, condition?.from)
+        this.manager[conditionMethod](
+          lang,
+          entityConfig.name,
+          condition?.from
+        )
       } else if (condition.type.indexOf('before') !== -1) {
         this.manager[conditionMethod](lang, entityConfig.name, condition.to)
       }
@@ -210,7 +230,11 @@
     entityConfig: SkillCustomRegexEntityTypeSchema
   ): Promise<void> {
     return new Promise((resolve) => {
-      this.manager.addRegexRule(lang, entityConfig.name, new RegExp(entityConfig.regex, 'g'))
+      this.manager.addRegexRule(
+        lang,
+        entityConfig.name,
+        new RegExp(entityConfig.regex, 'g')
+      )
       resolve()
     })

@@ -4,7 +4,14 @@ import { spawn } from 'node:child_process'
 import kill from 'tree-kill'

 import type { Language, ShortLanguageCode } from '@/types'
-import type { NLPAction, NLPDomain, NLPJSProcessResult, NLPSkill, NLPUtterance, NLUResult } from '@/core/nlp/types'
+import type {
+  NLPAction,
+  NLPDomain,
+  NLPJSProcessResult,
+  NLPSkill,
+  NLPUtterance,
+  NLUResult
+} from '@/core/nlp/types'
 import type { BrainProcessResult } from '@/core/brain/types'
 import { langs } from '@@/core/langs.json'
 import { TCP_SERVER_BIN_PATH } from '@/constants'
@@ -15,14 +22,12 @@ import { ActionLoop } from '@/core/nlp/nlu/action-loop'
 import { SlotFilling } from '@/core/nlp/nlu/slot-filling'
 import Conversation, { DEFAULT_ACTIVE_CONTEXT } from '@/core/nlp/conversation'

-type NLUProcessResult = Promise<
-  Partial<
-    BrainProcessResult & {
-      processingTime: number
-      nluProcessingTime: number
-    }
-  > | null
->
+type NLUProcessResult = Promise<Partial<
+  BrainProcessResult & {
+    processingTime: number
+    nluProcessingTime: number
+  }
+> | null>

 export const DEFAULT_NLU_RESULT = {
   utterance: '',
@@ -86,9 +91,7 @@ export default class NLU {
    * pick-up the right classification
    * and extract entities
    */
-  public process(
-    utterance: NLPUtterance
-  ): NLUProcessResult {
+  public process(utterance: NLPUtterance): NLUProcessResult {
     const processingTimeStart = Date.now()

     return new Promise(async (resolve, reject) => {
@@ -127,7 +130,8 @@
         }
       }

-      const result: NLPJSProcessResult = await MODEL_LOADER.mainNLPContainer.process(utterance)
+      const result: NLPJSProcessResult =
+        await MODEL_LOADER.mainNLPContainer.process(utterance)
       const { locale, answers, classifications } = result
       let { score, intent, domain } = result
@@ -141,7 +145,10 @@
       classifications.forEach(({ intent: newIntent, score: newScore }) => {
         if (newScore > 0.6) {
           const [skillName] = newIntent.split('.')
-          const newDomain = MODEL_LOADER.mainNLPContainer.getIntentDomain(locale, newIntent)
+          const newDomain = MODEL_LOADER.mainNLPContainer.getIntentDomain(
+            locale,
+            newIntent
+          )
           const contextName = `${newDomain}.${skillName}`
           if (this.conversation.activeContext.name === contextName) {
             score = newScore
@@ -167,10 +174,7 @@
       const isSupportedLanguage = LangHelper.getShortCodes().includes(locale)
       if (!isSupportedLanguage) {
-        BRAIN.talk(
-          `${BRAIN.wernicke('random_language_not_supported')}.`,
-          true
-        )
+        BRAIN.talk(`${BRAIN.wernicke('random_language_not_supported')}.`, true)
         SOCKET_SERVER.socket.emit('is-typing', false)
         return resolve({})
       }
@@ -190,10 +194,7 @@
       if (!fallback) {
         if (!BRAIN.isMuted) {
-          BRAIN.talk(
-            `${BRAIN.wernicke('random_unknown_intents')}.`,
-            true
-          )
+          BRAIN.talk(`${BRAIN.wernicke('random_unknown_intents')}.`, true)
           SOCKET_SERVER.socket.emit('is-typing', false)
         }
@@ -297,7 +298,8 @@
         return resolve({
           processingTime, // In ms, total time
           ...processedData,
-          nluProcessingTime: processingTime - (processedData?.executionTime || 0) // In ms, NLU processing time only
+          nluProcessingTime:
+            processingTime - (processedData?.executionTime || 0) // In ms, NLU processing time only
         })
       } catch (e) {
         const errorMessage = `Failed to execute action: ${e}`
@@ -333,9 +335,11 @@
       if (JSON.stringify(tmpWords) === JSON.stringify(fallbacks[i]?.words)) {
         this.nluResult.entities = []
-        this.nluResult.classification.domain = fallbacks[i]?.domain as NLPDomain
+        this.nluResult.classification.domain = fallbacks[i]
+          ?.domain as NLPDomain
         this.nluResult.classification.skill = fallbacks[i]?.skill as NLPSkill
-        this.nluResult.classification.action = fallbacks[i]?.action as NLPAction
+        this.nluResult.classification.action = fallbacks[i]
+          ?.action as NLPAction
         this.nluResult.classification.confidence = 1

         LogHelper.success('Fallback found')

@@ -11,7 +11,9 @@ export class SlotFilling {
   /**
    * Handle slot filling
    */
-  public static async handle(utterance: NLPUtterance): Promise<Partial<BrainProcessResult> | null> {
+  public static async handle(
+    utterance: NLPUtterance
+  ): Promise<Partial<BrainProcessResult> | null> {
     const processedData = await this.fillSlot(utterance)

     /**
@@ -48,7 +50,9 @@
    * Build NLU data result object based on slots
    * and ask for more entities if necessary
    */
-  public static async fillSlot(utterance: NLPUtterance): Promise<Partial<BrainProcessResult> | null> {
+  public static async fillSlot(
+    utterance: NLPUtterance
+  ): Promise<Partial<BrainProcessResult> | null> {
     if (!NLU.conversation.activeContext.nextAction) {
       return null
     }
@@ -136,7 +140,8 @@
    * 3. Or go to the brain executor if all slots have been filled in one shot
    */
   public static async route(intent: string): Promise<boolean> {
-    const slots = await MODEL_LOADER.mainNLPContainer.slotManager.getMandatorySlots(intent)
+    const slots =
+      await MODEL_LOADER.mainNLPContainer.slotManager.getMandatorySlots(intent)
     const hasMandatorySlots = Object.keys(slots)?.length > 0

     if (hasMandatorySlots) {
@@ -156,10 +161,14 @@
     const notFilledSlot = NLU.conversation.getNotFilledSlot()
     // Loop for questions if a slot hasn't been filled
     if (notFilledSlot) {
-      const { actions } = SkillDomainHelper.getSkillConfig(NLU.nluResult.skillConfigPath, BRAIN.lang)
-      const [currentSlot] = actions[
-        NLU.nluResult.classification.action
-      ]?.slots?.filter(({ name }) => name === notFilledSlot.name) ?? []
+      const { actions } = SkillDomainHelper.getSkillConfig(
+        NLU.nluResult.skillConfigPath,
+        BRAIN.lang
+      )
+      const [currentSlot] =
+        actions[NLU.nluResult.classification.action]?.slots?.filter(
+          ({ name }) => name === notFilledSlot.name
+        ) ?? []

       SOCKET_SERVER.socket.emit('suggest', currentSlot?.suggestions)
       BRAIN.talk(notFilledSlot.pickedQuestion)

@@ -275,8 +275,14 @@ export interface CustomRegexEntity extends CustomEntity<'regex'> {
   }
 }
 interface CustomTrimEntity extends CustomEntity<'trim'> {
-  subtype: 'between' | 'after' | 'afterFirst' | 'afterLast'
-    | 'before' | 'beforeFirst' | 'beforeLast'
+  subtype:
+    | 'between'
+    | 'after'
+    | 'afterFirst'
+    | 'afterLast'
+    | 'before'
+    | 'beforeFirst'
+    | 'beforeLast'
   resolution: {
     value: string
   }

@@ -6,13 +6,17 @@ import type { Socket } from 'node:net'
 import { Server as SocketIOServer } from 'socket.io'

-import {
-  LANG,
-  HAS_STT,
-  HAS_TTS,
-  IS_DEVELOPMENT_ENV
-} from '@/constants'
-import { HTTP_SERVER, TCP_CLIENT, ASR, STT, TTS, NLU, BRAIN, MODEL_LOADER } from '@/core'
+import { LANG, HAS_STT, HAS_TTS, IS_DEVELOPMENT_ENV } from '@/constants'
+import {
+  HTTP_SERVER,
+  TCP_CLIENT,
+  ASR,
+  STT,
+  TTS,
+  NLU,
+  BRAIN,
+  MODEL_LOADER
+} from '@/core'
 import { LogHelper } from '@/helpers/log-helper'
 import { LangHelper } from '@/helpers/lang-helper'
@@ -48,9 +52,7 @@ export default class SocketServer {
       if (HAS_TTS) {
         ttsState = 'enabled'

-        await TTS.init(
-          LangHelper.getShortCode(LANG)
-        )
+        await TTS.init(LangHelper.getShortCode(LANG))
       }

       LogHelper.title('Initialization')
@@ -121,7 +123,9 @@
         try {
           await ASR.encode(data)
         } catch (e) {
-          LogHelper.error(`ASR - Failed to encode audio blob to WAVE file: ${e.stack}`)
+          LogHelper.error(
+            `ASR - Failed to encode audio blob to WAVE file: ${e.stack}`
+          )
         }
       })
     }

@@ -11,6 +11,11 @@ const INTERVAL = IS_PRODUCTION_ENV ? 3000 : 500
 // Number of retries to connect to the TCP server
 const RETRIES_NB = IS_PRODUCTION_ENV ? 8 : 30

+interface ChunkData {
+  topic: string
+  data: unknown
+}
+
 export default class TCPClient {
   private static instance: TCPClient
@@ -50,7 +55,7 @@
       this.ee.emit('connected', null)
     })

-    this.tcpSocket.on('data', (chunk: { topic: string, data: unknown }) => {
+    this.tcpSocket.on('data', (chunk: ChunkData) => {
       LogHelper.title('TCP Client')
       LogHelper.info(`Received data: ${String(chunk)}`)

@@ -73,7 +73,11 @@ export default class GoogleCloudTTSSynthesizer extends TTSSynthesizerBase {
        }
      })

-      await fs.promises.writeFile(audioFilePath, response.audioContent as Uint8Array | string, 'binary')
+      await fs.promises.writeFile(
+        audioFilePath,
+        response.audioContent as Uint8Array | string,
+        'binary'
+      )

      const duration = await this.getAudioDuration(audioFilePath)

@@ -10,7 +10,9 @@ export abstract class TTSSynthesizerBase {
   protected abstract name: string
   protected abstract lang: LongLanguageCode

-  protected abstract synthesize(speech: string): Promise<SynthesizeResult | null>
+  protected abstract synthesize(
+    speech: string
+  ): Promise<SynthesizeResult | null>

   protected async getAudioDuration(audioFilePath: string): Promise<number> {
     ffmpeg.setFfmpegPath(ffmpegPath)

@@ -10,7 +10,7 @@ import { TTSSynthesizers, TTSProviders } from '@/core/tts/types'
 import { LogHelper } from '@/helpers/log-helper'
 import { LangHelper } from '@/helpers/lang-helper'

-type Speech = {
+interface Speech {
   text: string
   isFinalAnswer: boolean
 }
@@ -150,7 +150,10 @@ export default class TTS {
   /**
    * Add speeches to the queue
    */
-  public async add(text: Speech['text'], isFinalAnswer: Speech['isFinalAnswer']): Promise<Speech[]> {
+  public async add(
+    text: Speech['text'],
+    isFinalAnswer: Speech['isFinalAnswer']
+  ): Promise<Speech[]> {
     /**
      * Flite fix. When the string is only one word,
      * Flite cannot save to a file. So we add a space at the end of the string

@@ -17,9 +17,14 @@ export enum TTSSynthesizers {
   Flite = 'flite-synthesizer'
 }

-export type SynthesizeResult = {
+export interface SynthesizeResult {
   audioFilePath: string
   duration: number
 }

-export type TTSSynthesizer = AmazonPollySynthesizer | FliteSynthesizer | GoogleCloudTTSSynthesizer | WatsonTTSSynthesizer | undefined
+export type TTSSynthesizer =
+  | AmazonPollySynthesizer
+  | FliteSynthesizer
+  | GoogleCloudTTSSynthesizer
+  | WatsonTTSSynthesizer
+  | undefined

@@ -9,10 +9,10 @@ declare module '@ffprobe-installer/ffprobe' {
  * @see https://github.com/axa-group/nlp.js/tree/master/packages
  */
 interface BuiltinMicrosoft<T> {
-  new(settings: unknown, container: unknown): T
+  new (settings: unknown, container: unknown): T
 }
 interface Nlp<T> {
-  new(settings: unknown, container: unknown): T
+  new (settings: unknown, container: unknown): T
 }
 interface LangAll {
   register(container: unknown)

@@ -17,9 +17,7 @@ export class LangHelper {
    * @param shortCode The short language code of the language
    * @example getLongCode('en') // en-US
    */
-  public static getLongCode(
-    shortCode: ShortLanguageCode
-  ): LongLanguageCode {
+  public static getLongCode(shortCode: ShortLanguageCode): LongLanguageCode {
     for (const longLanguage in langs) {
       const longLanguageType = longLanguage as LongLanguageCode
       const lang = langs[longLanguageType]

@@ -22,7 +22,8 @@ interface SkillDomain {
   }
 }

-interface SkillConfigWithGlobalEntities extends Omit<SkillConfigSchema, 'entities'> {
+interface SkillConfigWithGlobalEntities
+  extends Omit<SkillConfigSchema, 'entities'> {
   entities: Record<string, GlobalEntitySchema>
 }
@@ -138,7 +139,9 @@
           encoding: 'utf8'
         })

-        result.entities[entity] = JSON.parse(entityRawData) as GlobalEntitySchema
+        result.entities[entity] = JSON.parse(
+          entityRawData
+        ) as GlobalEntitySchema
       }
     })

@@ -192,6 +192,12 @@ export type DomainSchema = Static<typeof domainSchemaObject>
 export type SkillSchema = Static<typeof skillSchemaObject>
 export type SkillConfigSchema = Static<typeof skillConfigSchemaObject>
 export type SkillBridgeSchema = Static<typeof skillSchemaObject.bridge>
-export type SkillCustomTrimEntityTypeSchema = Static<typeof skillCustomTrimEntityType>
-export type SkillCustomRegexEntityTypeSchema = Static<typeof skillCustomRegexEntityType>
-export type SkillCustomEnumEntityTypeSchema = Static<typeof skillCustomEnumEntityType>
+export type SkillCustomTrimEntityTypeSchema = Static<
+  typeof skillCustomTrimEntityType
+>
+export type SkillCustomRegexEntityTypeSchema = Static<
+  typeof skillCustomRegexEntityType
+>
+export type SkillCustomEnumEntityTypeSchema = Static<
+  typeof skillCustomEnumEntityType
+>

@@ -40,11 +40,15 @@ export const watsonVoiceConfiguration = Type.Strict(
   )
 )

-export type AmazonVoiceConfigurationSchema = Static<typeof amazonVoiceConfiguration>
+export type AmazonVoiceConfigurationSchema = Static<
+  typeof amazonVoiceConfiguration
+>
 export type GoogleCloudVoiceConfigurationSchema = Static<
   typeof googleCloudVoiceConfiguration
 >
-export type WatsonVoiceConfigurationSchema = Static<typeof watsonVoiceConfiguration>
+export type WatsonVoiceConfigurationSchema = Static<
+  typeof watsonVoiceConfiguration
+>
 export type VoiceConfigurationSchema =
   | AmazonVoiceConfigurationSchema
   | GoogleCloudVoiceConfigurationSchema

@@ -41,7 +41,10 @@ class Tts {
   ) {
     process.env.GOOGLE_APPLICATION_CREDENTIALS = path.join(
       process.cwd(),
-      'core', 'config', 'voice', 'google-cloud.json'
+      'core',
+      'config',
+      'voice',
+      'google-cloud.json'
     )
   } else if (
     typeof process.env.GOOGLE_APPLICATION_CREDENTIALS !== 'undefined' &&

@@ -68,7 +68,12 @@
     "answer": {
       "intents": {
         "yes": {
-          "utterance_samples": ["[Yes|Yep|Yup|Yeah]", "Of course", "Sure", "Correct"],
+          "utterance_samples": [
+            "[Yes|Yep|Yup|Yeah]",
+            "Of course",
+            "Sure",
+            "Correct"
+          ],
           "value": "y"
         },
         "no": {