mirror of https://github.com/leon-ai/leon.git synced 2024-08-16 13:40:51 +03:00

style: fix linting

Divlo 2023-04-02 19:18:15 +02:00
parent 070f0e1469
commit c49f931da4
No known key found for this signature in database
GPG Key ID: 8F9478F220CE65E9
28 changed files with 299 additions and 168 deletions
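
Nearly all of the changes below are mechanical reformatting: long argument lists, imports and union types are wrapped at roughly 80 columns, template literals without interpolation become plain single-quoted strings, arrays in the skill configuration JSON are collapsed or re-wrapped to fit the print width, and a few type aliases become interface declarations (a lint preference rather than a formatting change). The wrapping and quoting match what Prettier produces. The repository's actual formatter settings are not part of this diff; as a rough sketch, options along the following lines would yield the same output (the values below are an assumption, not the project's configuration file):

{
  "semi": false,
  "singleQuote": true,
  "printWidth": 80,
  "trailingComma": "none"
}

The leading semicolons, as in ;[slotName, slotEntity] = key.split('#'), also follow from "semi": false: without statement-ending semicolons, Prettier prefixes lines that start with "[" or "(" with a semicolon so they cannot be read as a continuation of the previous statement.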

@ -26,7 +26,6 @@
"plugins": ["@typescript-eslint", "unicorn", "import"],
"ignorePatterns": "*.spec.js",
"rules": {
"quotes": ["error", "single"],
"@typescript-eslint/no-non-null-assertion": ["off"],
"no-async-promise-executor": ["off"],
"no-underscore-dangle": ["error", { "allowAfterThis": true }],

.gitignore
@ -26,6 +26,7 @@ tcp_server/src/Pipfile.lock
!tcp_server/**/.gitkeep
!bridges/python/**/.gitkeep
!**/*.sample*
server/src/core/http-server/old-server.js
packages/**/config/config.json
skills/**/src/config.json
packages/**/data/db/*.json

@ -3,9 +3,7 @@
{
"method": "POST",
"route": "/api/action/games/akinator/choose_thematic",
"params": [
"thematic"
],
"params": ["thematic"],
"entitiesType": "trim"
},
{
@ -46,9 +44,7 @@
{
"method": "POST",
"route": "/api/action/games/rochambeau/play",
"params": [
"handsign"
],
"params": ["handsign"],
"entitiesType": "trim"
},
{
@ -124,10 +120,7 @@
{
"method": "POST",
"route": "/api/action/news/github_trends/run",
"params": [
"number",
"daterange"
],
"params": ["number", "daterange"],
"entitiesType": "builtIn"
},
{
@ -138,9 +131,7 @@
{
"method": "POST",
"route": "/api/action/productivity/todo_list/create_list",
"params": [
"list"
],
"params": ["list"],
"entitiesType": "trim"
},
{
@ -151,53 +142,37 @@
{
"method": "POST",
"route": "/api/action/productivity/todo_list/view_list",
"params": [
"list"
],
"params": ["list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/rename_list",
"params": [
"old_list",
"new_list"
],
"params": ["old_list", "new_list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/delete_list",
"params": [
"list"
],
"params": ["list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/add_todos",
"params": [
"todos",
"list"
],
"params": ["todos", "list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/complete_todos",
"params": [
"todos",
"list"
],
"params": ["todos", "list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/uncheck_todos",
"params": [
"todos",
"list"
],
"params": ["todos", "list"],
"entitiesType": "trim"
},
{
@ -218,9 +193,7 @@
{
"method": "POST",
"route": "/api/action/utilities/is_it_down/run",
"params": [
"url"
],
"params": ["url"],
"entitiesType": "builtIn"
},
{
@ -234,4 +207,4 @@
"params": []
}
]
}
}

@ -30,7 +30,7 @@ dotenv.config()
;(async () => {
try {
const nodeMinRequiredVersion = '16'
const npmMinRequiredVersion = '5'
const npmMinRequiredVersion = '8'
const minimumRequiredRAM = 4
const flitePath = 'bin/flite/flite'
const coquiLanguageModelPath = 'bin/coqui/huge-vocabulary.scorer'
@ -215,7 +215,9 @@ dotenv.config()
const p = await command(
`${PYTHON_BRIDGE_BIN_PATH} "${path.join(
process.cwd(),
'scripts', 'assets', 'intent-object.json'
'scripts',
'assets',
'intent-object.json'
)}"`,
{ shell: true }
)

@ -141,7 +141,7 @@ SPACY_MODELS.set('fr', {
}
try {
await command(`pipenv install --verbose --site-packages`, {
await command('pipenv install --verbose --site-packages', {
shell: true,
stdio: 'inherit'
})
@ -155,21 +155,21 @@ SPACY_MODELS.set('fr', {
LogHelper.info(
'Installing Rust installer as it is needed for the "tokenizers" package for macOS ARM64 architecture...'
)
await command(`curl https://sh.rustup.rs -sSf | sh -s -- -y`, {
await command('curl https://sh.rustup.rs -sSf | sh -s -- -y', {
shell: true,
stdio: 'inherit'
})
LogHelper.success('Rust installer installed')
LogHelper.info('Reloading configuration from "$HOME/.cargo/env"...')
await command(`source "$HOME/.cargo/env"`, {
await command('source "$HOME/.cargo/env"', {
shell: true,
stdio: 'inherit'
})
LogHelper.success('Configuration reloaded')
LogHelper.info('Checking Rust compiler version...')
await command(`rustc --version`, {
await command('rustc --version', {
shell: true,
stdio: 'inherit'
})

@ -34,10 +34,10 @@ const TCP_SERVER_VERSION_FILE_PATH = path.join(
)
export const [, PYTHON_BRIDGE_VERSION] = fs
.readFileSync(PYTHON_BRIDGE_VERSION_FILE_PATH, 'utf8')
.split('\'')
.split("'")
export const [, TCP_SERVER_VERSION] = fs
.readFileSync(TCP_SERVER_VERSION_FILE_PATH, 'utf8')
.split('\'')
.split("'")
export const PYTHON_BRIDGE_BIN_NAME = 'leon-python-bridge'
export const TCP_SERVER_BIN_NAME = 'leon-tcp-server'

@ -10,7 +10,11 @@ import type {
NLUResult
} from '@/core/nlp/types'
import type { SkillConfigSchema } from '@/schemas/skill-schemas'
import type { BrainProcessResult, IntentObject, SkillResult } from '@/core/brain/types'
import type {
BrainProcessResult,
IntentObject,
SkillResult
} from '@/core/brain/types'
import { SkillActionType, SkillOutputType } from '@/core/brain/types'
import { langs } from '@@/core/langs.json'
import { HAS_TTS, PYTHON_BRIDGE_BIN_PATH, TMP_PATH } from '@/constants'
@ -107,7 +111,11 @@ export default class Brain {
/**
* Pickup speech info we need to return
*/
public wernicke(type: string, key?: string, obj?: Record<string, unknown>): string {
public wernicke(
type: string,
key?: string,
obj?: Record<string, unknown>
): string {
let answerObject: Record<string, string> = {}
let answer = ''
@ -152,7 +160,11 @@ export default class Brain {
/**
* Create the intent object that will be passed to the skill
*/
private createIntentObject(nluResult: NLUResult, utteranceId: string, slots: IntentObject['slots']): IntentObject {
private createIntentObject(
nluResult: NLUResult,
utteranceId: string,
slots: IntentObject['slots']
): IntentObject {
return {
id: utteranceId,
lang: this._lang,
@ -171,7 +183,9 @@ export default class Brain {
/**
* Handle the skill process output
*/
private handleLogicActionSkillProcessOutput(data: Buffer): Promise<Error | null> | void {
private handleLogicActionSkillProcessOutput(
data: Buffer
): Promise<Error | null> | void {
try {
const obj = JSON.parse(data.toString())
@ -191,7 +205,11 @@ export default class Brain {
return Promise.resolve(null)
} else {
return Promise.reject(new Error(`The "${this.skillFriendlyName}" skill from the "${this.domainFriendlyName}" domain is not well configured. Check the configuration file.`))
return Promise.reject(
new Error(
`The "${this.skillFriendlyName}" skill from the "${this.domainFriendlyName}" domain is not well configured. Check the configuration file.`
)
)
}
} catch (e) {
LogHelper.title('Brain')
@ -217,7 +235,10 @@ export default class Brain {
/**
* Handle the skill process error
*/
private handleLogicActionSkillProcessError(data: Buffer, intentObjectPath: string): Error {
private handleLogicActionSkillProcessError(
data: Buffer,
intentObjectPath: string
): Error {
this.speakSkillError()
Brain.deleteIntentObjFile(intentObjectPath)
@ -250,7 +271,11 @@ export default class Brain {
})
}
const intentObject = this.createIntentObject(nluResult, utteranceId, slots)
const intentObject = this.createIntentObject(
nluResult,
utteranceId,
slots
)
try {
fs.writeFileSync(intentObjectPath, JSON.stringify(intentObject))
@ -294,8 +319,13 @@ export default class Brain {
skillConfigPath,
classification: { action: actionName }
} = nluResult
const { actions } = SkillDomainHelper.getSkillConfig(skillConfigPath, this._lang)
const action = actions[actionName] as SkillConfigSchema['actions'][string]
const { actions } = SkillDomainHelper.getSkillConfig(
skillConfigPath,
this._lang
)
const action = actions[
actionName
] as SkillConfigSchema['actions'][string]
const { type: actionType } = action
const nextAction = action.next_action
? actions[action.next_action]
@ -343,7 +373,8 @@ export default class Brain {
skillResult = JSON.parse(this.skillOutput)
if (skillResult?.output.speech) {
skillResult.output.speech = skillResult.output.speech.toString()
skillResult.output.speech =
skillResult.output.speech.toString()
if (!this.isMuted) {
this.talk(skillResult.output.speech, true)
}
@ -354,7 +385,8 @@ export default class Brain {
skillResult.output.type === SkillOutputType.End &&
skillResult.output.options['synchronization'] &&
skillResult.output.options['synchronization'].enabled &&
skillResult.output.options['synchronization'].enabled === true
skillResult.output.options['synchronization'].enabled ===
true
) {
const sync = new Synchronizer(
this,
@ -373,7 +405,9 @@ export default class Brain {
}
} catch (e) {
LogHelper.title(`${this.skillFriendlyName} skill`)
LogHelper.error(`There is an error on the final output: ${String(e)}`)
LogHelper.error(
`There is an error on the final output: ${String(e)}`
)
this.speakSkillError()
}
@ -395,7 +429,10 @@ export default class Brain {
) {
SOCKET_SERVER.socket.emit('suggest', nextAction.suggestions)
}
if (action?.suggestions && skillResult?.output.core?.showSuggestions) {
if (
action?.suggestions &&
skillResult?.output.core?.showSuggestions
) {
SOCKET_SERVER.socket.emit('suggest', action.suggestions)
}
@ -426,10 +463,8 @@ export default class Brain {
'config',
this._lang + '.json'
)
const { actions, entities: skillConfigEntities } = SkillDomainHelper.getSkillConfig(
configFilePath,
this._lang
)
const { actions, entities: skillConfigEntities } =
SkillDomainHelper.getSkillConfig(configFilePath, this._lang)
const utteranceHasEntities = nluResult.entities.length > 0
const { answers: rawAnswers } = nluResult
let answers = rawAnswers
@ -453,10 +488,14 @@ export default class Brain {
// In case the expected answer requires a known entity
if (answer?.indexOf('{{') !== -1) {
// TODO
const unknownAnswers = actions[nluResult.classification.action]?.unknown_answers
const unknownAnswers =
actions[nluResult.classification.action]?.unknown_answers
if (unknownAnswers) {
answer = unknownAnswers[Math.floor(Math.random() * unknownAnswers.length)]
answer =
unknownAnswers[
Math.floor(Math.random() * unknownAnswers.length)
]
}
}
} else {
@ -469,7 +508,8 @@ export default class Brain {
if (utteranceHasEntities && answer?.indexOf('{{') !== -1) {
nluResult.currentEntities.forEach((entityObj) => {
answer = StringHelper.findAndMap(answer as string, {
[`{{ ${entityObj.entity} }}`]: (entityObj as NERCustomEntity).resolution.value
[`{{ ${entityObj.entity} }}`]: (entityObj as NERCustomEntity)
.resolution.value
})
/**
@ -488,7 +528,8 @@ export default class Brain {
if (entity && dataKey && entity === entityObj.entity) {
const { option } = entityObj as CustomEnumEntity
const entityOption = skillConfigEntities[entity]?.options[option]
const entityOption =
skillConfigEntities[entity]?.options[option]
const entityOptionData = entityOption?.data
let valuesArr: string[] = []
@ -500,7 +541,9 @@ export default class Brain {
if (valuesArr.length > 0) {
answer = StringHelper.findAndMap(answer as string, {
[match]:
valuesArr[Math.floor(Math.random() * valuesArr.length)]
valuesArr[
Math.floor(Math.random() * valuesArr.length)
]
})
}
}

@ -5,7 +5,8 @@ import type {
NLPSkill,
NLPUtterance,
NLUResolver,
NLUResult, NLUSlot,
NLUResult,
NLUSlot,
NLUSlots
} from '@/core/nlp/types'
import type { SkillConfigSchema } from '@/schemas/skill-schemas'
@ -62,7 +63,7 @@ export interface IntentObject {
export interface BrainProcessResult extends NLUResult {
speeches: string[]
executionTime: number
utteranceId? : string
utteranceId?: string
lang?: ShortLanguageCode
core?: SkillCoreData | undefined
action?: SkillConfigSchema['actions'][string]

@ -81,7 +81,9 @@ export default class Conversation {
const newContextName = `${domain}.${skillName}`
const { actions } = SkillDomainHelper.getSkillConfig(skillConfigPath, lang)
// Grab next action from the NLU data file
const { next_action: nextAction } = actions[actionName] as { next_action: string }
const { next_action: nextAction } = actions[actionName] as {
next_action: string
}
// If slots are required to trigger next actions, then go through the context activation
if (slotKeys.length > 0) {
@ -183,7 +185,7 @@ export default class Conversation {
// If it's the first slot setting grabbed from the model or not
if (isFirstSet) {
[slotName, slotEntity] = key.split('#') as [string, string]
;[slotName, slotEntity] = key.split('#') as [string, string]
questions = slotObj.locales?.[lang] as string[]
}
@ -191,8 +193,9 @@ export default class Conversation {
const [foundEntity] = entities.filter(
({ entity }) => entity === slotEntity && !slotObj.isFilled
)
const pickedQuestion =
questions[Math.floor(Math.random() * questions.length)] as string
const pickedQuestion = questions[
Math.floor(Math.random() * questions.length)
] as string
const slot = this._activeContext.slots[slotName]
const newSlot = {
name: slotName,

@ -8,6 +8,11 @@ import { LogHelper } from '@/helpers/log-helper'
import { SkillDomainHelper } from '@/helpers/skill-domain-helper'
import { DEFAULT_NLU_RESULT } from '@/core/nlp/nlu/nlu'
interface ResolveResolversResult {
name: string
value: string
}
export class ActionLoop {
/**
* Handle action loop logic before NLU processing
@ -43,7 +48,10 @@ export class ActionLoop {
NLU.nluResult
)
const { actions, resolvers } = SkillDomainHelper.getSkillConfig(skillConfigPath, BRAIN.lang)
const { actions, resolvers } = SkillDomainHelper.getSkillConfig(
skillConfigPath,
BRAIN.lang
)
const action = actions[NLU.nluResult.classification.action]
if (action?.loop) {
const { name: expectedItemName, type: expectedItemType } =
@ -67,7 +75,7 @@ export class ActionLoop {
const resolveResolvers = (
resolver: string,
intent: string
): [{ name: string, value: string }] => {
): [ResolveResolversResult] => {
const resolversPath = join(
process.cwd(),
'core',
@ -77,8 +85,10 @@ export class ActionLoop {
)
// Load the skill resolver or the global resolver
const resolvedIntents = !intent.includes('resolver.global')
? (resolvers && resolvers[resolver])
: JSON.parse(fs.readFileSync(join(resolversPath, `${resolver}.json`), 'utf8'))
? resolvers && resolvers[resolver]
: JSON.parse(
fs.readFileSync(join(resolversPath, `${resolver}.json`), 'utf8')
)
// E.g. resolver.global.denial -> denial
intent = intent.substring(intent.lastIndexOf('.') + 1)
@ -144,8 +154,10 @@ export class ActionLoop {
// Break the action loop and prepare for the next action if necessary
if (processedData.core?.isInActionLoop === false) {
NLU.conversation.activeContext.isInActionLoop = !!processedData.action?.loop
NLU.conversation.activeContext.actionName = processedData.action?.next_action as string
NLU.conversation.activeContext.isInActionLoop =
!!processedData.action?.loop
NLU.conversation.activeContext.actionName = processedData.action
?.next_action as string
NLU.conversation.activeContext.intent = `${processedData.classification?.skill}.${processedData.action?.next_action}`
}

@ -51,9 +51,7 @@ export default class ModelLoader {
this.loadSkillsResolversModel(
path.join(MODELS_PATH, 'leon-skills-resolvers-model.nlp')
),
this.loadMainModel(
path.join(MODELS_PATH, 'leon-main-model.nlp')
)
this.loadMainModel(path.join(MODELS_PATH, 'leon-main-model.nlp'))
])
}
@ -65,7 +63,11 @@ export default class ModelLoader {
if (!fs.existsSync(modelPath)) {
LogHelper.title('Model Loader')
reject(new Error('The global resolvers NLP model does not exist, please run: npm run train'))
reject(
new Error(
'The global resolvers NLP model does not exist, please run: npm run train'
)
)
} else {
LogHelper.title('Model Loader')
@ -84,7 +86,11 @@ export default class ModelLoader {
resolve()
} catch (e) {
reject(new Error('An error occurred while loading the global resolvers NLP model'))
reject(
new Error(
'An error occurred while loading the global resolvers NLP model'
)
)
}
}
})
@ -120,7 +126,11 @@ export default class ModelLoader {
resolve()
} catch (e) {
reject(new Error('An error occurred while loading the skills resolvers NLP model'))
reject(
new Error(
'An error occurred while loading the skills resolvers NLP model'
)
)
}
}
})
@ -165,7 +175,9 @@ export default class ModelLoader {
resolve()
} catch (e) {
reject(new Error('An error occurred while loading the main NLP model'))
reject(
new Error('An error occurred while loading the main NLP model')
)
}
}
})

@ -1,5 +1,10 @@
import type { ShortLanguageCode } from '@/types'
import type { NEREntity, NERSpacyEntity, NLPUtterance, NLUResult } from '@/core/nlp/types'
import type {
NEREntity,
NERSpacyEntity,
NLPUtterance,
NLUResult
} from '@/core/nlp/types'
import type {
SkillCustomEnumEntityTypeSchema,
SkillCustomRegexEntityTypeSchema,
@ -50,7 +55,9 @@ export default class NER {
LogHelper.success('Entities found:')
entities.forEach((entity) =>
LogHelper.success(`{ value: ${entity.sourceText}, entity: ${entity.entity} }`)
LogHelper.success(
`{ value: ${entity.sourceText}, entity: ${entity.entity} }`
)
)
}
@ -68,8 +75,13 @@ export default class NER {
const { classification } = nluResult
// Remove end-punctuation and add an end-whitespace
const utterance = `${StringHelper.removeEndPunctuation(nluResult.utterance)} `
const { actions } = SkillDomainHelper.getSkillConfig(skillConfigPath, lang)
const utterance = `${StringHelper.removeEndPunctuation(
nluResult.utterance
)} `
const { actions } = SkillDomainHelper.getSkillConfig(
skillConfigPath,
lang
)
const { action } = classification
const promises = []
const actionEntities = actions[action]?.entities || []
@ -92,10 +104,11 @@ export default class NER {
await Promise.all(promises)
const { entities }: { entities: NEREntity[] } = await this.manager.process({
locale: lang,
text: utterance
})
const { entities }: { entities: NEREntity[] } =
await this.manager.process({
locale: lang,
text: utterance
})
// Normalize entities
entities.map((entity) => {
@ -148,10 +161,13 @@ export default class NER {
*/
private getSpacyEntities(utterance: NLPUtterance): Promise<NERSpacyEntity[]> {
return new Promise((resolve) => {
const spacyEntitiesReceivedHandler =
async ({ spacyEntities }: { spacyEntities: NERSpacyEntity[] }): Promise<void> => {
resolve(spacyEntities)
}
const spacyEntitiesReceivedHandler = async ({
spacyEntities
}: {
spacyEntities: NERSpacyEntity[]
}): Promise<void> => {
resolve(spacyEntities)
}
TCP_CLIENT.ee.removeAllListeners()
TCP_CLIENT.ee.on('spacy-entities-received', spacyEntitiesReceivedHandler)
@ -192,7 +208,11 @@ export default class NER {
options: {}
}
this.manager.addRule(lang, entityConfig.name, 'trim', rule)
this.manager[conditionMethod](lang, entityConfig.name, condition?.from)
this.manager[conditionMethod](
lang,
entityConfig.name,
condition?.from
)
} else if (condition.type.indexOf('before') !== -1) {
this.manager[conditionMethod](lang, entityConfig.name, condition.to)
}
@ -210,7 +230,11 @@ export default class NER {
entityConfig: SkillCustomRegexEntityTypeSchema
): Promise<void> {
return new Promise((resolve) => {
this.manager.addRegexRule(lang, entityConfig.name, new RegExp(entityConfig.regex, 'g'))
this.manager.addRegexRule(
lang,
entityConfig.name,
new RegExp(entityConfig.regex, 'g')
)
resolve()
})

@ -4,7 +4,14 @@ import { spawn } from 'node:child_process'
import kill from 'tree-kill'
import type { Language, ShortLanguageCode } from '@/types'
import type { NLPAction, NLPDomain, NLPJSProcessResult, NLPSkill, NLPUtterance, NLUResult } from '@/core/nlp/types'
import type {
NLPAction,
NLPDomain,
NLPJSProcessResult,
NLPSkill,
NLPUtterance,
NLUResult
} from '@/core/nlp/types'
import type { BrainProcessResult } from '@/core/brain/types'
import { langs } from '@@/core/langs.json'
import { TCP_SERVER_BIN_PATH } from '@/constants'
@ -15,14 +22,12 @@ import { ActionLoop } from '@/core/nlp/nlu/action-loop'
import { SlotFilling } from '@/core/nlp/nlu/slot-filling'
import Conversation, { DEFAULT_ACTIVE_CONTEXT } from '@/core/nlp/conversation'
type NLUProcessResult = Promise<
Partial<
BrainProcessResult & {
processingTime: number
nluProcessingTime: number
}
> | null
>
type NLUProcessResult = Promise<Partial<
BrainProcessResult & {
processingTime: number
nluProcessingTime: number
}
> | null>
export const DEFAULT_NLU_RESULT = {
utterance: '',
@ -86,9 +91,7 @@ export default class NLU {
* pick-up the right classification
* and extract entities
*/
public process(
utterance: NLPUtterance
): NLUProcessResult {
public process(utterance: NLPUtterance): NLUProcessResult {
const processingTimeStart = Date.now()
return new Promise(async (resolve, reject) => {
@ -127,7 +130,8 @@ export default class NLU {
}
}
const result: NLPJSProcessResult = await MODEL_LOADER.mainNLPContainer.process(utterance)
const result: NLPJSProcessResult =
await MODEL_LOADER.mainNLPContainer.process(utterance)
const { locale, answers, classifications } = result
let { score, intent, domain } = result
@ -141,7 +145,10 @@ export default class NLU {
classifications.forEach(({ intent: newIntent, score: newScore }) => {
if (newScore > 0.6) {
const [skillName] = newIntent.split('.')
const newDomain = MODEL_LOADER.mainNLPContainer.getIntentDomain(locale, newIntent)
const newDomain = MODEL_LOADER.mainNLPContainer.getIntentDomain(
locale,
newIntent
)
const contextName = `${newDomain}.${skillName}`
if (this.conversation.activeContext.name === contextName) {
score = newScore
@ -167,10 +174,7 @@ export default class NLU {
const isSupportedLanguage = LangHelper.getShortCodes().includes(locale)
if (!isSupportedLanguage) {
BRAIN.talk(
`${BRAIN.wernicke('random_language_not_supported')}.`,
true
)
BRAIN.talk(`${BRAIN.wernicke('random_language_not_supported')}.`, true)
SOCKET_SERVER.socket.emit('is-typing', false)
return resolve({})
}
@ -190,10 +194,7 @@ export default class NLU {
if (!fallback) {
if (!BRAIN.isMuted) {
BRAIN.talk(
`${BRAIN.wernicke('random_unknown_intents')}.`,
true
)
BRAIN.talk(`${BRAIN.wernicke('random_unknown_intents')}.`, true)
SOCKET_SERVER.socket.emit('is-typing', false)
}
@ -297,7 +298,8 @@ export default class NLU {
return resolve({
processingTime, // In ms, total time
...processedData,
nluProcessingTime: processingTime - (processedData?.executionTime || 0) // In ms, NLU processing time only
nluProcessingTime:
processingTime - (processedData?.executionTime || 0) // In ms, NLU processing time only
})
} catch (e) {
const errorMessage = `Failed to execute action: ${e}`
@ -333,9 +335,11 @@ export default class NLU {
if (JSON.stringify(tmpWords) === JSON.stringify(fallbacks[i]?.words)) {
this.nluResult.entities = []
this.nluResult.classification.domain = fallbacks[i]?.domain as NLPDomain
this.nluResult.classification.domain = fallbacks[i]
?.domain as NLPDomain
this.nluResult.classification.skill = fallbacks[i]?.skill as NLPSkill
this.nluResult.classification.action = fallbacks[i]?.action as NLPAction
this.nluResult.classification.action = fallbacks[i]
?.action as NLPAction
this.nluResult.classification.confidence = 1
LogHelper.success('Fallback found')

@ -11,7 +11,9 @@ export class SlotFilling {
/**
* Handle slot filling
*/
public static async handle(utterance: NLPUtterance): Promise<Partial<BrainProcessResult> | null> {
public static async handle(
utterance: NLPUtterance
): Promise<Partial<BrainProcessResult> | null> {
const processedData = await this.fillSlot(utterance)
/**
@ -48,7 +50,9 @@ export class SlotFilling {
* Build NLU data result object based on slots
* and ask for more entities if necessary
*/
public static async fillSlot(utterance: NLPUtterance): Promise<Partial<BrainProcessResult> | null> {
public static async fillSlot(
utterance: NLPUtterance
): Promise<Partial<BrainProcessResult> | null> {
if (!NLU.conversation.activeContext.nextAction) {
return null
}
@ -136,7 +140,8 @@ export class SlotFilling {
* 3. Or go to the brain executor if all slots have been filled in one shot
*/
public static async route(intent: string): Promise<boolean> {
const slots = await MODEL_LOADER.mainNLPContainer.slotManager.getMandatorySlots(intent)
const slots =
await MODEL_LOADER.mainNLPContainer.slotManager.getMandatorySlots(intent)
const hasMandatorySlots = Object.keys(slots)?.length > 0
if (hasMandatorySlots) {
@ -156,10 +161,14 @@ export class SlotFilling {
const notFilledSlot = NLU.conversation.getNotFilledSlot()
// Loop for questions if a slot hasn't been filled
if (notFilledSlot) {
const { actions } = SkillDomainHelper.getSkillConfig(NLU.nluResult.skillConfigPath, BRAIN.lang)
const [currentSlot] = actions[
NLU.nluResult.classification.action
]?.slots?.filter(({ name }) => name === notFilledSlot.name) ?? []
const { actions } = SkillDomainHelper.getSkillConfig(
NLU.nluResult.skillConfigPath,
BRAIN.lang
)
const [currentSlot] =
actions[NLU.nluResult.classification.action]?.slots?.filter(
({ name }) => name === notFilledSlot.name
) ?? []
SOCKET_SERVER.socket.emit('suggest', currentSlot?.suggestions)
BRAIN.talk(notFilledSlot.pickedQuestion)

@ -275,8 +275,14 @@ export interface CustomRegexEntity extends CustomEntity<'regex'> {
}
}
interface CustomTrimEntity extends CustomEntity<'trim'> {
subtype: 'between' | 'after' | 'afterFirst' | 'afterLast'
| 'before' | 'beforeFirst' | 'beforeLast'
subtype:
| 'between'
| 'after'
| 'afterFirst'
| 'afterLast'
| 'before'
| 'beforeFirst'
| 'beforeLast'
resolution: {
value: string
}

@ -6,13 +6,17 @@ import type { Socket } from 'node:net'
import { Server as SocketIOServer } from 'socket.io'
import { LANG, HAS_STT, HAS_TTS, IS_DEVELOPMENT_ENV } from '@/constants'
import {
LANG,
HAS_STT,
HAS_TTS,
IS_DEVELOPMENT_ENV
} from '@/constants'
import { HTTP_SERVER, TCP_CLIENT, ASR, STT, TTS, NLU, BRAIN, MODEL_LOADER } from '@/core'
HTTP_SERVER,
TCP_CLIENT,
ASR,
STT,
TTS,
NLU,
BRAIN,
MODEL_LOADER
} from '@/core'
import { LogHelper } from '@/helpers/log-helper'
import { LangHelper } from '@/helpers/lang-helper'
@ -48,9 +52,7 @@ export default class SocketServer {
if (HAS_TTS) {
ttsState = 'enabled'
await TTS.init(
LangHelper.getShortCode(LANG)
)
await TTS.init(LangHelper.getShortCode(LANG))
}
LogHelper.title('Initialization')
@ -121,7 +123,9 @@ export default class SocketServer {
try {
await ASR.encode(data)
} catch (e) {
LogHelper.error(`ASR - Failed to encode audio blob to WAVE file: ${e.stack}`)
LogHelper.error(
`ASR - Failed to encode audio blob to WAVE file: ${e.stack}`
)
}
})
}

@ -11,6 +11,11 @@ const INTERVAL = IS_PRODUCTION_ENV ? 3000 : 500
// Number of retries to connect to the TCP server
const RETRIES_NB = IS_PRODUCTION_ENV ? 8 : 30
interface ChunkData {
topic: string
data: unknown
}
export default class TCPClient {
private static instance: TCPClient
@ -50,7 +55,7 @@ export default class TCPClient {
this.ee.emit('connected', null)
})
this.tcpSocket.on('data', (chunk: { topic: string, data: unknown }) => {
this.tcpSocket.on('data', (chunk: ChunkData) => {
LogHelper.title('TCP Client')
LogHelper.info(`Received data: ${String(chunk)}`)

@ -73,7 +73,11 @@ export default class GoogleCloudTTSSynthesizer extends TTSSynthesizerBase {
}
})
await fs.promises.writeFile(audioFilePath, response.audioContent as Uint8Array | string, 'binary')
await fs.promises.writeFile(
audioFilePath,
response.audioContent as Uint8Array | string,
'binary'
)
const duration = await this.getAudioDuration(audioFilePath)

@ -10,7 +10,9 @@ export abstract class TTSSynthesizerBase {
protected abstract name: string
protected abstract lang: LongLanguageCode
protected abstract synthesize(speech: string): Promise<SynthesizeResult | null>
protected abstract synthesize(
speech: string
): Promise<SynthesizeResult | null>
protected async getAudioDuration(audioFilePath: string): Promise<number> {
ffmpeg.setFfmpegPath(ffmpegPath)

@ -10,7 +10,7 @@ import { TTSSynthesizers, TTSProviders } from '@/core/tts/types'
import { LogHelper } from '@/helpers/log-helper'
import { LangHelper } from '@/helpers/lang-helper'
type Speech = {
interface Speech {
text: string
isFinalAnswer: boolean
}
@ -150,7 +150,10 @@ export default class TTS {
/**
* Add speeches to the queue
*/
public async add(text: Speech['text'], isFinalAnswer: Speech['isFinalAnswer']): Promise<Speech[]> {
public async add(
text: Speech['text'],
isFinalAnswer: Speech['isFinalAnswer']
): Promise<Speech[]> {
/**
* Flite fix. When the string is only one word,
* Flite cannot save to a file. So we add a space at the end of the string

@ -17,9 +17,14 @@ export enum TTSSynthesizers {
Flite = 'flite-synthesizer'
}
export type SynthesizeResult = {
export interface SynthesizeResult {
audioFilePath: string
duration: number
}
export type TTSSynthesizer = AmazonPollySynthesizer | FliteSynthesizer | GoogleCloudTTSSynthesizer | WatsonTTSSynthesizer | undefined
export type TTSSynthesizer =
| AmazonPollySynthesizer
| FliteSynthesizer
| GoogleCloudTTSSynthesizer
| WatsonTTSSynthesizer
| undefined

@ -9,10 +9,10 @@ declare module '@ffprobe-installer/ffprobe' {
* @see https://github.com/axa-group/nlp.js/tree/master/packages
*/
interface BuiltinMicrosoft<T> {
new(settings: unknown, container: unknown): T
new (settings: unknown, container: unknown): T
}
interface Nlp<T> {
new(settings: unknown, container: unknown): T
new (settings: unknown, container: unknown): T
}
interface LangAll {
register(container: unknown)

@ -17,9 +17,7 @@ export class LangHelper {
* @param shortCode The short language code of the language
* @example getLongCode('en') // en-US
*/
public static getLongCode(
shortCode: ShortLanguageCode
): LongLanguageCode {
public static getLongCode(shortCode: ShortLanguageCode): LongLanguageCode {
for (const longLanguage in langs) {
const longLanguageType = longLanguage as LongLanguageCode
const lang = langs[longLanguageType]

@ -22,7 +22,8 @@ interface SkillDomain {
}
}
interface SkillConfigWithGlobalEntities extends Omit<SkillConfigSchema, 'entities'> {
interface SkillConfigWithGlobalEntities
extends Omit<SkillConfigSchema, 'entities'> {
entities: Record<string, GlobalEntitySchema>
}
@ -138,7 +139,9 @@ export class SkillDomainHelper {
encoding: 'utf8'
})
result.entities[entity] = JSON.parse(entityRawData) as GlobalEntitySchema
result.entities[entity] = JSON.parse(
entityRawData
) as GlobalEntitySchema
}
})

@ -192,6 +192,12 @@ export type DomainSchema = Static<typeof domainSchemaObject>
export type SkillSchema = Static<typeof skillSchemaObject>
export type SkillConfigSchema = Static<typeof skillConfigSchemaObject>
export type SkillBridgeSchema = Static<typeof skillSchemaObject.bridge>
export type SkillCustomTrimEntityTypeSchema = Static<typeof skillCustomTrimEntityType>
export type SkillCustomRegexEntityTypeSchema = Static<typeof skillCustomRegexEntityType>
export type SkillCustomEnumEntityTypeSchema = Static<typeof skillCustomEnumEntityType>
export type SkillCustomTrimEntityTypeSchema = Static<
typeof skillCustomTrimEntityType
>
export type SkillCustomRegexEntityTypeSchema = Static<
typeof skillCustomRegexEntityType
>
export type SkillCustomEnumEntityTypeSchema = Static<
typeof skillCustomEnumEntityType
>

@ -40,11 +40,15 @@ export const watsonVoiceConfiguration = Type.Strict(
)
)
export type AmazonVoiceConfigurationSchema = Static<typeof amazonVoiceConfiguration>
export type AmazonVoiceConfigurationSchema = Static<
typeof amazonVoiceConfiguration
>
export type GoogleCloudVoiceConfigurationSchema = Static<
typeof googleCloudVoiceConfiguration
>
export type WatsonVoiceConfigurationSchema = Static<typeof watsonVoiceConfiguration>
export type WatsonVoiceConfigurationSchema = Static<
typeof watsonVoiceConfiguration
>
export type VoiceConfigurationSchema =
| AmazonVoiceConfigurationSchema
| GoogleCloudVoiceConfigurationSchema

@ -41,7 +41,10 @@ class Tts {
) {
process.env.GOOGLE_APPLICATION_CREDENTIALS = path.join(
process.cwd(),
'core', 'config', 'voice', 'google-cloud.json'
'core',
'config',
'voice',
'google-cloud.json'
)
} else if (
typeof process.env.GOOGLE_APPLICATION_CREDENTIALS !== 'undefined' &&

@ -68,7 +68,12 @@
"answer": {
"intents": {
"yes": {
"utterance_samples": ["[Yes|Yep|Yup|Yeah]", "Of course", "Sure", "Correct"],
"utterance_samples": [
"[Yes|Yep|Yup|Yeah]",
"Of course",
"Sure",
"Correct"
],
"value": "y"
},
"no": {