mirror of https://github.com/leon-ai/leon.git synced 2024-11-27 08:06:03 +03:00

refactor(server): model loader error handling

louistiti 2023-03-20 20:52:42 +08:00
parent 5369da1734
commit b51bf4b1c2
6 changed files with 33 additions and 52 deletions
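
Taken together, the hunks below replace the model loader's reject({ type, obj }) wrappers and its in-loader BRAIN.talk / SOCKET_SERVER side effects with plain reject(new Error(...)) calls, moving failure reporting to the caller (the SocketServer hunk wraps loadNLPModels in try/catch and logs the error). A minimal sketch of that pattern follows; the loadModel helper, the model path, and the bootstrap function are illustrative assumptions, not Leon's actual code.

import { existsSync } from 'node:fs'

// Hypothetical, simplified loader for illustration only: it rejects with a
// plain Error instead of a { type, obj } wrapper and performs no
// user-facing side effects itself.
function loadModel(modelPath: string): Promise<void> {
  return new Promise((resolve, reject) => {
    if (!existsSync(modelPath)) {
      reject(
        new Error('The NLP model does not exist, please run: npm run train')
      )
      return
    }

    try {
      // ...actual model loading would happen here...
      resolve()
    } catch {
      reject(new Error('An error occurred while loading the NLP model'))
    }
  })
}

// The caller decides how to surface the failure, mirroring the SocketServer hunk.
async function bootstrap(): Promise<void> {
  try {
    await loadModel('/path/to/model.nlp') // hypothetical path
  } catch (e) {
    console.error(`Failed to load NLP models: ${e}`)
  }
}

void bootstrap()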

View File

@@ -2,8 +2,7 @@
"answers": {
"success": {},
"errors": {
- "not_found": "Sorry, it seems I cannot find that",
- "nlu": "It might come from my natural language understanding, the error returned is: \"%error%\""
+ "not_found": "Sorry, it seems I cannot find that"
},
"synchronizer": {
"syncing_direct": "I will now synchronize the downloaded content on your current device. Don't worry, I will let you know once I'm done",
@@ -12,10 +11,10 @@
"synced_google_drive": "The new content is now available on Google Drive"
},
"random_errors": [
- "Sorry, there is a problem with my system",
- "Sorry, I don't work correctly",
- "Sorry, you need to fix me",
- "Sorry, I cannot do that because I'm broken"
+ "Sorry, there is a problem with my system. Please check my logs for further details",
+ "Sorry, I don't work correctly. Please look at my logs for more information",
+ "Sorry, you need to fix me. Please take a look at my logs for further information",
+ "Sorry, I cannot do that because I'm broken. Please check my logs for further details"
],
"random_skill_errors": [
"Sorry, it seems I have a problem with the \"%skill_name%\" skill from the \"%domain_name%\" domain",

View File

@@ -22,7 +22,14 @@
"That works",
"Go ahead",
"Why not",
- "Please"
+ "Please",
+ "Absolutely",
+ "Precisely",
+ "Spot on",
+ "Undoubtedly",
+ "Certainly",
+ "Without a doubt",
+ "Definitely"
],
"value": true
},
@@ -36,7 +43,10 @@
"No thanks",
"No I'm fine",
"Hell no",
- "Please do not"
+ "Please do not",
+ "I disagree",
+ "Negative",
+ "Not at all"
],
"value": false
}

View File

@@ -12,10 +12,10 @@
"synced_google_drive": "Le nouveau contenu est maintenant disponible sur Google Drive"
},
"random_errors": [
- "Désolé, il y a un problème avec mon système",
- "Désolé, je ne fonctionne pas correctement",
- "Désolé, vous devez me réparer",
- "Désolé, je ne peux aboutir à votre demande parce que je suis cassé"
+ "Désolé, il y a un problème avec mon système. Veuillez consulter mes logs pour plus de détails",
+ "Désolé, je ne fonctionne pas correctement. Merci de regarder mes logs pour plus d'information",
+ "Désolé, vous devez me réparer. Veuillez vérifier mes logs pour en savoir plus",
+ "Désolé, je ne peux aboutir à votre demande parce que je suis cassé. Regardez mes logs pour plus de détails"
],
"random_skill_errors": [
"Désolé, il semblerait y avoir un problème avec le skill \"%skill_name%\" du domaine \"%domain_name%\"",

View File

@@ -7,7 +7,7 @@ import { BuiltinMicrosoft } from '@nlpjs/builtin-microsoft'
import { LangAll } from '@nlpjs/lang-all'
import { MODELS_PATH } from '@/constants'
- import { BRAIN, NER, SOCKET_SERVER } from '@/core'
+ import { NER } from '@/core'
import Ner from '@/core/ner'
import { LogHelper } from '@/helpers/log-helper'
@@ -65,14 +65,9 @@ export default class ModelLoader {
if (!fs.existsSync(modelPath)) {
LogHelper.title('Model Loader')
- reject({
- type: 'warning',
- obj: new Error(
- 'The global resolvers NLP model does not exist, please run: npm run train'
- )
- })
+ reject(new Error('The global resolvers NLP model does not exist, please run: npm run train'))
} else {
- LogHelper.title('NLU')
+ LogHelper.title('Model Loader')
try {
const container = await containerBootstrap()
@@ -89,16 +84,7 @@
resolve()
} catch (e) {
- BRAIN.talk(
- `${BRAIN.wernicke('random_errors')}! ${BRAIN.wernicke(
- 'errors',
- 'nlu',
- { '%error%': e.message }
- )}.`
- )
- SOCKET_SERVER.socket.emit('is-typing', false)
- reject({ type: 'error', obj: e })
+ reject(new Error('An error occurred while loading the global resolvers NLP model'))
}
}
})
@@ -134,16 +120,7 @@
resolve()
} catch (e) {
- BRAIN.talk(
- `${BRAIN.wernicke('random_errors')}! ${BRAIN.wernicke(
- 'errors',
- 'nlu',
- { '%error%': e.message }
- )}.`
- )
- SOCKET_SERVER.socket.emit('is-typing', false)
- reject({ type: 'error', obj: e })
+ reject(new Error('An error occurred while loading the skills resolvers NLP model'))
}
}
})
@@ -188,16 +165,7 @@
resolve()
} catch (e) {
- BRAIN.talk(
- `${BRAIN.wernicke('random_errors')}! ${BRAIN.wernicke(
- 'errors',
- 'nlu',
- { '%error%': e.message }
- )}.`
- )
- SOCKET_SERVER.socket.emit('is-typing', false)
- reject({ type: 'error', obj: e })
+ reject(new Error('An error occurred while loading the main NLP model'))
}
}
})
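
For comparison at the call sites, here is an illustrative sketch (not Leon code) of what a catch block receives from these loader promises before and after this change: previously a { type, obj } wrapper object, now a regular Error instance.

// Old rejection shape, as seen in the removed reject({ type: ..., obj: ... }) calls.
type OldRejection = { type: 'warning' | 'error'; obj: Error }

async function handleOldShape(load: () => Promise<void>): Promise<void> {
  try {
    await load()
  } catch (e) {
    // The real Error was nested inside the wrapper object
    const { type, obj } = e as OldRejection
    console.error(`[${type}] ${obj.message}`)
  }
}

async function handleNewShape(load: () => Promise<void>): Promise<void> {
  try {
    await load()
  } catch (e) {
    // Now a regular Error: instanceof checks and e.message work directly
    if (e instanceof Error) {
      console.error(e.message)
    }
  }
}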

View File

@@ -311,7 +311,7 @@ export default class NLU {
}
const msg =
- 'The NLP model is missing, please rebuild the project or if you are in dev run: npm run train'
+ 'An NLP model is missing, please rebuild the project or if you are in dev run: npm run train'
LogHelper.error(msg)
return reject(msg)
}

View File

@@ -58,7 +58,11 @@ export default class SocketServer {
LogHelper.success(`STT ${sttState}`)
LogHelper.success(`TTS ${ttsState}`)
- await MODEL_LOADER.loadNLPModels()
+ try {
+ await MODEL_LOADER.loadNLPModels()
+ } catch (e) {
+ LogHelper.error(`Failed to load NLP models: ${e}`)
+ }
io.on('connection', (socket) => {
LogHelper.title('Client')
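
As a small illustration of what the new catch block above logs: interpolating an Error into a template literal uses its toString(), which prefixes the message with "Error:". The snippet below assumes nothing beyond the error message used in the main-model hunk.

const e = new Error('An error occurred while loading the main NLP model')
console.log(`Failed to load NLP models: ${e}`)
// -> Failed to load NLP models: Error: An error occurred while loading the main NLP model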