mirror of https://github.com/leon-ai/leon.git synced 2024-11-23 20:12:08 +03:00

feat(server): introduce main NLP model and resolvers NLP model

This commit is contained in:
louistiti 2022-06-30 22:35:54 +08:00
parent 899676110a
commit e37526d905
GPG Key ID: 7ECA3DD523793FE6
13 changed files with 383 additions and 245 deletions
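
In short, the single core/data/leon-model.nlp is replaced by two models under core/data/models/ (leon-resolvers-model.nlp and leon-main-model.nlp), the monolithic scripts/train.js is split into dedicated training scripts under scripts/train/, and the server now loads both models at startup. Per the package.json scripts below, the models are regenerated and then verified with:

npm run train
npm run check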

.gitignore

@@ -25,6 +25,6 @@ packages/**/config/config.json
skills/**/src/config.json
packages/**/data/db/*.json
skills/**/memory/*.json
core/data/leon-model.nlp
core/data/models/*.nlp
package.json.backup
.python-version

View File

package.json

@@ -48,7 +48,7 @@
"build:server": "npm run delete-dist:server && npm run train && npm run generate:skills-endpoints && babel ./server/src -d ./server/dist --copy-files && shx mkdir -p server/dist/tmp",
"start:tcp-server": "cross-env PIPENV_PIPFILE=bridges/python/Pipfile pipenv run python bridges/python/tcp_server/main.py",
"start": "cross-env LEON_NODE_ENV=production node ./server/dist/index.js",
"train": "babel-node scripts/run-train.js",
"train": "babel-node scripts/train/run-train.js",
"prepare-release": "babel-node scripts/release/prepare-release.js",
"check": "babel-node scripts/run-check.js",
"docker:build": "docker build -t leonai/leon .",

View File

@@ -22,7 +22,8 @@ export default () => new Promise(async (resolve, reject) => {
const googleCloudPath = 'core/config/voice/google-cloud.json'
const watsonSttPath = 'core/config/voice/watson-stt.json'
const watsonTtsPath = 'core/config/voice/watson-tts.json'
const nlpModelPath = 'core/data/leon-model.nlp'
const resolversNlpModelPath = 'core/data/models/leon-resolvers-model.nlp'
const mainNlpModelPath = 'core/data/models/leon-main-model.nlp'
const report = {
can_run: { title: 'Run', type: 'error', v: true },
can_run_module: { title: 'Run modules', type: 'error', v: true },
@@ -87,13 +88,26 @@ export default () => new Promise(async (resolve, reject) => {
log.error(`${e}\n`)
}
// NLP model checking
// Resolvers NLP model checking
log.info('NLP model state')
if (!fs.existsSync(nlpModelPath) || !Object.keys(fs.readFileSync(nlpModelPath)).length) {
log.info('Resolvers NLP model state')
if (!fs.existsSync(resolversNlpModelPath)
|| !Object.keys(fs.readFileSync(resolversNlpModelPath)).length) {
report.can_text.v = false
Object.keys(report).forEach((item) => { if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1) report[item].v = false })
log.error('NLP model not found or broken. Try to generate a new one: "npm run train"\n')
log.error('Resolvers NLP model not found or broken. Try to generate a new one: "npm run train"\n')
} else {
log.success('Found and valid\n')
}
// Main NLP model checking
log.info('Main NLP model state')
if (!fs.existsSync(mainNlpModelPath)
|| !Object.keys(fs.readFileSync(mainNlpModelPath)).length) {
report.can_text.v = false
Object.keys(report).forEach((item) => { if (item.indexOf('stt') !== -1 || item.indexOf('tts') !== -1) report[item].v = false })
log.error('Main NLP model not found or broken. Try to generate a new one: "npm run train"\n')
} else {
log.success('Found and valid\n')
}
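
A side note on the emptiness test used for both models above: fs.readFileSync returns a Buffer, and Object.keys() on a Buffer yields one key per byte, so an empty key list means the file exists but holds no data. A minimal standalone sketch of the same check (the helper name is illustrative, not part of the codebase):

import fs from 'fs'

// True when the given .nlp file exists and is not empty:
// Object.keys() on the returned Buffer lists its byte indices,
// so a zero-length list means an empty model file
const isUsableNlpModel = (modelPath) => fs.existsSync(modelPath)
  && Object.keys(fs.readFileSync(modelPath)).length > 0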

View File

@@ -1,7 +1,7 @@
import loader from '@/helpers/loader'
import log from '@/helpers/log'
import train from '../train'
import train from '../train/train'
import generateHttpApiKey from '../generate/generate-http-api-key'
import setupDotenv from './setup-dotenv'
import setupCore from './setup-core'

scripts/train.js (deleted file)

@@ -1,213 +0,0 @@
import { containerBootstrap } from '@nlpjs/core-loader'
import { Nlp } from '@nlpjs/nlp'
import { composeFromPattern } from '@nlpjs/utils'
import { LangAll } from '@nlpjs/lang-all'
import dotenv from 'dotenv'
import fs from 'fs'
import path from 'path'
import log from '@/helpers/log'
import lang from '@/helpers/lang'
import domain from '@/helpers/domain'
import string from '@/helpers/string'
import json from '@/helpers/json'
dotenv.config()
/**
* Training utterance samples script
*
* npm run train [en or fr]
*/
export default () => new Promise(async (resolve, reject) => {
const modelFileName = 'core/data/leon-model.nlp'
const supportedActionTypes = ['dialog', 'logic']
try {
const container = await containerBootstrap()
container.use(Nlp)
container.use(LangAll)
const nlp = container.get('nlp')
const nluManager = container.get('nlu-manager')
// const slotManager = container.get('SlotManager')
nluManager.settings.log = false
nluManager.settings.trainByDomain = true
// slotManager.settings.
nlp.settings.forceNER = true // https://github.com/axa-group/nlp.js/blob/master/examples/17-ner-nlg/index.js
// nlp.settings.nlu = { useNoneFeature: true }
nlp.settings.calculateSentiment = true
nlp.settings.modelFileName = modelFileName
nlp.settings.threshold = 0.8
const [domainKeys, domains] = await Promise.all([domain.list(), domain.getDomainsObj()])
const shortLangs = lang.getShortLangs()
for (let h = 0; h < shortLangs.length; h += 1) {
const lang = shortLangs[h]
const globalEntitiesPath = path.join(process.cwd(), 'core/data', lang, 'global-entities')
const globalEntityFiles = fs.readdirSync(globalEntitiesPath)
const resolversPath = path.join(process.cwd(), 'core/data', lang, 'resolvers')
const resolverFiles = fs.readdirSync(resolversPath)
const newEntitiesObj = { }
nlp.addLanguage(lang)
// Add global entities annotations (@...)
for (let i = 0; i < globalEntityFiles.length; i += 1) {
const globalEntityFileName = globalEntityFiles[i]
const [entityName] = globalEntityFileName.split('.')
const globalEntityPath = path.join(globalEntitiesPath, globalEntityFileName)
const { options } = JSON.parse(fs.readFileSync(globalEntityPath, 'utf8'))
const optionKeys = Object.keys(options)
const optionsObj = { }
log.info(`[${lang}] Adding "${entityName}" global entity...`)
optionKeys.forEach((optionKey) => {
const { synonyms } = options[optionKey]
optionsObj[optionKey] = synonyms
})
newEntitiesObj[entityName] = { options: optionsObj }
log.success(`[${lang}] "${entityName}" global entity added`)
}
nlp.addEntities(newEntitiesObj, lang)
// Train resolvers
for (let i = 0; i < resolverFiles.length; i += 1) {
const resolverFileName = resolverFiles[i]
const resolverPath = path.join(resolversPath, resolverFileName)
const { name: resolverName, intents: resolverIntents } = JSON.parse(fs.readFileSync(resolverPath, 'utf8'))
const intentKeys = Object.keys(resolverIntents)
log.info(`[${lang}] Training "${resolverName}" resolver...`)
for (let j = 0; j < intentKeys.length; j += 1) {
const intentName = intentKeys[j]
const intentObj = resolverIntents[intentName]
nlp.assignDomain(lang, intentName, 'system')
for (let k = 0; k < intentObj.utterance_samples.length; k += 1) {
nlp.addDocument(lang, intentObj.utterance_samples[k], intentName)
}
}
log.success(`[${lang}] "${resolverName}" resolver trained`)
}
// Train skills actions
for (let i = 0; i < domainKeys.length; i += 1) {
const currentDomain = domains[domainKeys[i]]
const skillKeys = Object.keys(currentDomain.skills)
log.info(`[${lang}] Training "${domainKeys[i]}" domain model...`)
for (let j = 0; j < skillKeys.length; j += 1) {
const { name: skillName } = currentDomain.skills[skillKeys[j]]
const currentSkill = currentDomain.skills[skillKeys[j]]
log.info(`[${lang}] Using "${skillKeys[j]}" skill NLU data`)
const nluFilePath = path.join(currentSkill.path, 'nlu', `${lang}.json`)
if (fs.existsSync(nluFilePath)) {
const {
actions,
variables
} = await json.loadNluData(nluFilePath, lang) // eslint-disable-line no-await-in-loop
const actionsKeys = Object.keys(actions)
for (let k = 0; k < actionsKeys.length; k += 1) {
const actionName = actionsKeys[k]
const actionObj = actions[actionName]
const intent = `${skillName}.${actionName}`
const { utterance_samples: utteranceSamples, answers, slots } = actionObj
if (!actionObj.type || !supportedActionTypes.includes(actionObj.type)) {
log.error(`This action type isn't supported: ${actionObj.type}`)
process.exit(1)
}
nlp.assignDomain(lang, `${skillName}.${actionName}`, currentDomain.name)
if (slots) {
for (let l = 0; l < slots.length; l += 1) {
const slotObj = slots[l]
/**
* TODO: handle entity within questions such as "Where does {{ hero }} live?"
* https://github.com/axa-group/nlp.js/issues/328
* https://github.com/axa-group/nlp.js/issues/291
* https://github.com/axa-group/nlp.js/issues/307
*/
if (slotObj.item.type === 'entity') {
nlp.slotManager
.addSlot(intent, `${slotObj.name}#${slotObj.item.name}`, true, { [lang]: slotObj.questions })
}
/* nlp.slotManager
.addSlot(intent, 'boolean', true, { [lang]: 'How many players?' }) */
}
}
for (let l = 0; l < utteranceSamples?.length; l += 1) {
const utterance = utteranceSamples[l]
// Achieve Cartesian training
const utteranceAlternatives = composeFromPattern(utterance)
utteranceAlternatives.forEach((utteranceAlternative) => {
nlp.addDocument(lang, utteranceAlternative, intent)
})
}
// Train NLG if the action has a dialog type
if (actionObj.type === 'dialog') {
const variablesObj = { }
// Dynamic variables binding if any variable is declared
if (variables) {
const variableKeys = Object.keys(variables)
for (let l = 0; l < variableKeys.length; l += 1) {
const key = variableKeys[l]
variablesObj[`%${key}%`] = variables[variableKeys[l]]
}
}
for (let l = 0; l < answers?.length; l += 1) {
const variableKeys = Object.keys(variablesObj)
if (variableKeys.length > 0) {
answers[l] = string.pnr(answers[l], variablesObj)
}
nlp.addAnswer(lang, `${skillName}.${actionName}`, answers[l])
}
}
}
}
}
log.success(`[${lang}] "${domainKeys[i]}" domain trained`)
}
}
try {
await nlp.train()
log.success(`NLP model saved in ${modelFileName}`)
resolve()
} catch (e) {
log.error(`Failed to save NLP model: ${e}`)
reject()
}
} catch (e) {
log.error(e.message)
reject(e)
}
})

scripts/train/run-train.js

@@ -3,7 +3,7 @@ import log from '@/helpers/log'
import train from './train'
/**
* Execute the training script
* Execute the training scripts
*/
(async () => {
try {

scripts/train/train-main-model/train-global-entities.js (new file)

@@ -0,0 +1,40 @@
import path from 'path'
import fs from 'fs'
import log from '@/helpers/log'
/**
* Train global entities
*/
export default (lang, nlp) => new Promise((resolve) => {
log.title('Global entities training')
const globalEntitiesPath = path.join(process.cwd(), 'core/data', lang, 'global-entities')
const globalEntityFiles = fs.readdirSync(globalEntitiesPath)
const newEntitiesObj = { }
// Add global entities annotations (@...)
for (let i = 0; i < globalEntityFiles.length; i += 1) {
const globalEntityFileName = globalEntityFiles[i]
const [entityName] = globalEntityFileName.split('.')
const globalEntityPath = path.join(globalEntitiesPath, globalEntityFileName)
const { options } = JSON.parse(fs.readFileSync(globalEntityPath, 'utf8'))
const optionKeys = Object.keys(options)
const optionsObj = { }
log.info(`[${lang}] Adding "${entityName}" global entity...`)
optionKeys.forEach((optionKey) => {
const { synonyms } = options[optionKey]
optionsObj[optionKey] = synonyms
})
newEntitiesObj[entityName] = { options: optionsObj }
log.success(`[${lang}] "${entityName}" global entity added`)
}
nlp.addEntities(newEntitiesObj, lang)
resolve()
})
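
For reference, a hypothetical global entity definition shaped the way this script reads it (an options map whose values carry synonyms); the actual files are JSON under core/data/<lang>/global-entities and the entity name comes from the file name. Shown as a JS literal for brevity:

// Hypothetical shape of e.g. core/data/en/global-entities/color.json,
// matching the { options: { <key>: { synonyms: [...] } } } structure parsed above
const exampleGlobalEntity = {
  options: {
    red: { synonyms: ['red', 'crimson'] },
    blue: { synonyms: ['blue', 'navy'] }
  }
}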

scripts/train/train-main-model/train-skills-actions.js (new file)

@@ -0,0 +1,115 @@
import path from 'path'
import fs from 'fs'
import { composeFromPattern } from '@nlpjs/utils'
import log from '@/helpers/log'
import json from '@/helpers/json'
import string from '@/helpers/string'
import domain from '@/helpers/domain'
/**
* Train skills actions
*/
export default (lang, nlp) => new Promise(async (resolve) => {
log.title('Skills actions training')
const supportedActionTypes = ['dialog', 'logic']
const [domainKeys, domains] = await Promise.all([domain.list(), domain.getDomainsObj()])
// Train skills actions
for (let i = 0; i < domainKeys.length; i += 1) {
const currentDomain = domains[domainKeys[i]]
const skillKeys = Object.keys(currentDomain.skills)
log.info(`[${lang}] Training "${domainKeys[i]}" domain model...`)
for (let j = 0; j < skillKeys.length; j += 1) {
const { name: skillName } = currentDomain.skills[skillKeys[j]]
const currentSkill = currentDomain.skills[skillKeys[j]]
log.info(`[${lang}] Using "${skillKeys[j]}" skill NLU data`)
const nluFilePath = path.join(currentSkill.path, 'nlu', `${lang}.json`)
if (fs.existsSync(nluFilePath)) {
const {
actions,
variables
} = await json.loadNluData(nluFilePath, lang) // eslint-disable-line no-await-in-loop
const actionsKeys = Object.keys(actions)
for (let k = 0; k < actionsKeys.length; k += 1) {
const actionName = actionsKeys[k]
const actionObj = actions[actionName]
const intent = `${skillName}.${actionName}`
const { utterance_samples: utteranceSamples, answers, slots } = actionObj
if (!actionObj.type || !supportedActionTypes.includes(actionObj.type)) {
log.error(`This action type isn't supported: ${actionObj.type}`)
process.exit(1)
}
nlp.assignDomain(lang, `${skillName}.${actionName}`, currentDomain.name)
if (slots) {
for (let l = 0; l < slots.length; l += 1) {
const slotObj = slots[l]
/**
* TODO: handle entity within questions such as "Where does {{ hero }} live?"
* https://github.com/axa-group/nlp.js/issues/328
* https://github.com/axa-group/nlp.js/issues/291
* https://github.com/axa-group/nlp.js/issues/307
*/
if (slotObj.item.type === 'entity') {
nlp.slotManager
.addSlot(intent, `${slotObj.name}#${slotObj.item.name}`, true, { [lang]: slotObj.questions })
}
/* nlp.slotManager
.addSlot(intent, 'boolean', true, { [lang]: 'How many players?' }) */
}
}
for (let l = 0; l < utteranceSamples?.length; l += 1) {
const utterance = utteranceSamples[l]
// Achieve Cartesian training
const utteranceAlternatives = composeFromPattern(utterance)
utteranceAlternatives.forEach((utteranceAlternative) => {
nlp.addDocument(lang, utteranceAlternative, intent)
})
}
// Train NLG if the action has a dialog type
if (actionObj.type === 'dialog') {
const variablesObj = { }
// Dynamic variables binding if any variable is declared
if (variables) {
const variableKeys = Object.keys(variables)
for (let l = 0; l < variableKeys.length; l += 1) {
const key = variableKeys[l]
variablesObj[`%${key}%`] = variables[variableKeys[l]]
}
}
for (let l = 0; l < answers?.length; l += 1) {
const variableKeys = Object.keys(variablesObj)
if (variableKeys.length > 0) {
answers[l] = string.pnr(answers[l], variablesObj)
}
nlp.addAnswer(lang, `${skillName}.${actionName}`, answers[l])
}
}
}
}
}
log.success(`[${lang}] "${domainKeys[i]}" domain trained`)
}
resolve()
})
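
A word on the "Achieve Cartesian training" step above: composeFromPattern from @nlpjs/utils is assumed here to expand bracketed alternations into every combination, so a single utterance sample with alternatives becomes several training documents. A rough sketch (the utterance text is made up, and the result order may differ):

import { composeFromPattern } from '@nlpjs/utils'

// One pattern with two alternation groups expands to the Cartesian product
// of its alternatives; each expansion is then fed to nlp.addDocument()
const samples = composeFromPattern('Turn [on|off] the [kitchen|bedroom] light')
// -> [
//   'Turn on the kitchen light',
//   'Turn on the bedroom light',
//   'Turn off the kitchen light',
//   'Turn off the bedroom light'
// ]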

scripts/train/train-resolvers-model/train-global-resolvers.js (new file)

@@ -0,0 +1,40 @@
import path from 'path'
import fs from 'fs'
import log from '@/helpers/log'
/**
* Train global resolvers
*/
export default (lang, nlp) => new Promise((resolve) => {
log.title('Global resolvers training')
const resolversPath = path.join(process.cwd(), 'core/data', lang, 'resolvers')
const resolverFiles = fs.readdirSync(resolversPath)
// Train resolvers
for (let i = 0; i < resolverFiles.length; i += 1) {
const resolverFileName = resolverFiles[i]
const resolverPath = path.join(resolversPath, resolverFileName)
const { name: resolverName, intents: resolverIntents } = JSON.parse(fs.readFileSync(resolverPath, 'utf8'))
const intentKeys = Object.keys(resolverIntents)
log.info(`[${lang}] Training "${resolverName}" resolver...`)
for (let j = 0; j < intentKeys.length; j += 1) {
const intentName = intentKeys[j]
const intentObj = resolverIntents[intentName]
nlp.assignDomain(lang, intentName, 'system')
for (let k = 0; k < intentObj.utterance_samples.length; k += 1) {
nlp.addDocument(lang, intentObj.utterance_samples[k], intentName)
}
}
log.success(`[${lang}] "${resolverName}" resolver trained`)
}
resolve()
})
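
Likewise, a hypothetical resolver definition shaped the way this script consumes it (a name, plus intents keyed by intent name with their utterance_samples); the actual files are JSON under core/data/<lang>/resolvers, shown as a JS literal for brevity:

// Hypothetical shape of e.g. core/data/en/resolvers/<resolver>.json;
// each intent is assigned to the 'system' domain with its utterance samples
const exampleResolver = {
  name: 'affirmation_denial',
  intents: {
    affirmation: { utterance_samples: ['Yes', 'Sure', 'Of course'] },
    denial: { utterance_samples: ['No', 'Nope', 'Not at all'] }
  }
}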

scripts/train/train.js (new file, 102 lines)

@@ -0,0 +1,102 @@
import { containerBootstrap } from '@nlpjs/core-loader'
import { Nlp } from '@nlpjs/nlp'
import { LangAll } from '@nlpjs/lang-all'
import dotenv from 'dotenv'
import log from '@/helpers/log'
import lang from '@/helpers/lang'
import trainGlobalResolvers from './train-resolvers-model/train-global-resolvers'
import trainGlobalEntities from './train-main-model/train-global-entities'
import trainSkillsActions from './train-main-model/train-skills-actions'
dotenv.config()
/**
* Training utterance samples script
*
* npm run train [en or fr]
*/
export default () => new Promise(async (resolve, reject) => {
const resolversModelFileName = 'core/data/models/leon-resolvers-model.nlp'
const mainModelFileName = 'core/data/models/leon-main-model.nlp'
try {
/**
* Resolvers NLP model configuration
*/
const resolversContainer = await containerBootstrap()
resolversContainer.use(Nlp)
resolversContainer.use(LangAll)
const resolversNlp = resolversContainer.get('nlp')
const resolversNluManager = resolversContainer.get('nlu-manager')
resolversNluManager.settings.log = false
resolversNluManager.settings.trainByDomain = true
resolversNlp.settings.modelFileName = resolversModelFileName
resolversNlp.settings.threshold = 0.8
/**
* Main NLP model configuration
*/
const mainContainer = await containerBootstrap()
mainContainer.use(Nlp)
mainContainer.use(LangAll)
const mainNlp = mainContainer.get('nlp')
const mainNluManager = mainContainer.get('nlu-manager')
// const mainSlotManager = container.get('SlotManager')
mainNluManager.settings.log = false
mainNluManager.settings.trainByDomain = true
// mainSlotManager.settings.
mainNlp.settings.forceNER = true // https://github.com/axa-group/nlp.js/blob/master/examples/17-ner-nlg/index.js
// mainNlp.settings.nlu = { useNoneFeature: true }
mainNlp.settings.calculateSentiment = true
mainNlp.settings.modelFileName = mainModelFileName
mainNlp.settings.threshold = 0.8
/**
* Training phases
*/
const shortLangs = lang.getShortLangs()
for (let h = 0; h < shortLangs.length; h += 1) {
const lang = shortLangs[h]
resolversNlp.addLanguage(lang)
// eslint-disable-next-line no-await-in-loop
await trainGlobalResolvers(lang, resolversNlp)
mainNlp.addLanguage(lang)
// eslint-disable-next-line no-await-in-loop
await trainGlobalEntities(lang, mainNlp)
// eslint-disable-next-line no-await-in-loop
await trainSkillsActions(lang, mainNlp)
}
try {
await resolversNlp.train()
log.success(`Resolvers NLP model saved in ${resolversModelFileName}`)
resolve()
} catch (e) {
log.error(`Failed to save resolvers NLP model: ${e}`)
reject()
}
try {
await mainNlp.train()
log.success(`Main NLP model saved in ${mainModelFileName}`)
resolve()
} catch (e) {
log.error(`Failed to save main NLP model: ${e}`)
reject()
}
} catch (e) {
log.error(e.message)
reject(e)
}
})

View File

@@ -294,9 +294,12 @@ server.init = async () => {
nlu = new Nlu(brain)
// Load NLP model
// Load NLP models
try {
await nlu.loadModel(join(process.cwd(), 'core/data/leon-model.nlp'))
await Promise.all([
nlu.loadResolversModel(join(process.cwd(), 'core/data/models/leon-resolvers-model.nlp')),
nlu.loadMainModel(join(process.cwd(), 'core/data/models/leon-main-model.nlp'))
])
} catch (e) {
log[e.type](e.obj.message)
}

View File

@@ -38,7 +38,8 @@ class Nlu {
constructor (brain) {
this.brain = brain
this.request = request
this.nlp = { }
this.resolversNlp = { }
this.mainNlp = { }
this.ner = { }
this.conv = new Conversation('conv0')
this.nluResultObj = defaultNluResultObj // TODO
@@ -48,33 +49,28 @@
}
/**
* Load the NLP model from the latest training
* Load the resolvers NLP model from the latest training
*/
loadModel (nlpModel) {
loadResolversModel (nlpModel) {
return new Promise(async (resolve, reject) => {
if (!fs.existsSync(nlpModel)) {
log.title('NLU')
reject({ type: 'warning', obj: new Error('The NLP model does not exist, please run: npm run train') })
reject({ type: 'warning', obj: new Error('The resolvers NLP model does not exist, please run: npm run train') })
} else {
log.title('NLU')
try {
const container = await containerBootstrap()
container.register('extract-builtin-??', new BuiltinMicrosoft({
builtins: Ner.getMicrosoftBuiltinEntities()
}), true)
container.use(Nlp)
container.use(LangAll)
this.nlp = container.get('nlp')
this.resolversNlp = container.get('nlp')
const nluManager = container.get('nlu-manager')
nluManager.settings.spellCheck = true
await this.nlp.load(nlpModel)
log.success('NLP model loaded')
this.ner = new Ner(this.nlp.ner)
await this.resolversNlp.load(nlpModel)
log.success('Resolvers NLP model loaded')
resolve()
} catch (err) {
@@ -88,10 +84,51 @@ }
}
/**
* Check if the NLP model exists
* Load the main NLP model from the latest training
*/
hasNlpModel () {
return Object.keys(this.nlp).length > 0
loadMainModel (nlpModel) {
return new Promise(async (resolve, reject) => {
if (!fs.existsSync(nlpModel)) {
log.title('NLU')
reject({ type: 'warning', obj: new Error('The main NLP model does not exist, please run: npm run train') })
} else {
log.title('NLU')
try {
const container = await containerBootstrap()
container.register('extract-builtin-??', new BuiltinMicrosoft({
builtins: Ner.getMicrosoftBuiltinEntities()
}), true)
container.use(Nlp)
container.use(LangAll)
this.mainNlp = container.get('nlp')
const nluManager = container.get('nlu-manager')
nluManager.settings.spellCheck = true
await this.mainNlp.load(nlpModel)
log.success('Main NLP model loaded')
this.ner = new Ner(this.mainNlp.ner)
resolve()
} catch (err) {
this.brain.talk(`${this.brain.wernicke('random_errors')}! ${this.brain.wernicke('errors', 'nlu', { '%error%': err.message })}.`)
this.brain.socket.emit('is-typing', false)
reject({ type: 'error', obj: err })
}
}
})
}
/**
* Check if the NLP models exist
*/
hasNlpModels () {
return Object.keys(this.resolversNlp).length > 0
&& Object.keys(this.mainNlp).length > 0
}
/**
@@ -156,7 +193,7 @@ }
}
}
this.nlp.addEntities(spacyEntity, this.brain.lang)
this.mainNlp.addEntities(spacyEntity, this.brain.lang)
})
}
}
@@ -198,7 +235,7 @@ hasMatchingEntity = this.nluResultObj
hasMatchingEntity = this.nluResultObj
.entities.filter(({ entity }) => expectedItemName === entity).length > 0
} else if (expectedItemType === 'resolver') {
const { intent } = await this.nlp.process(utterance)
const { intent } = await this.resolversNlp.process(utterance)
const resolveResolvers = (resolver, intent) => {
const resolversPath = join(process.cwd(), 'core/data', this.brain.lang, 'resolvers')
const { intents } = JSON.parse(fs.readFileSync(join(resolversPath, `${resolver}.json`)))
@@ -314,7 +351,7 @@ }
}
utterance = string.ucfirst(utterance)
if (!this.hasNlpModel()) {
if (!this.hasNlpModels()) {
if (!opts.mute) {
this.brain.talk(`${this.brain.wernicke('random_errors')}!`)
this.brain.socket.emit('is-typing', false)
@@ -345,7 +382,7 @@ }
}
}
const result = await this.nlp.process(utterance)
const result = await this.mainNlp.process(utterance)
const {
locale, answers, classifications
} = result
@@ -361,7 +398,7 @@ classifications.forEach(({ intent: newIntent, score: newScore }) => {
classifications.forEach(({ intent: newIntent, score: newScore }) => {
if (newScore > 0.6) {
const [skillName] = newIntent.split('.')
const newDomain = this.nlp.getIntentDomain(locale, newIntent)
const newDomain = this.mainNlp.getIntentDomain(locale, newIntent)
const contextName = `${newDomain}.${skillName}`
if (this.conv.activeContext.name === contextName) {
score = newScore
@@ -605,7 +642,7 @@ * 3. Or go to the brain executor if all slots have been filled in one shot
* 3. Or go to the brain executor if all slots have been filled in one shot
*/
async routeSlotFilling (intent) {
const slots = await this.nlp.slotManager.getMandatorySlots(intent)
const slots = await this.mainNlp.slotManager.getMandatorySlots(intent)
const hasMandatorySlots = Object.keys(slots)?.length > 0
if (hasMandatorySlots) {