mirror of https://github.com/leon-ai/leon.git
synced 2024-11-23 20:12:08 +03:00

refactor: usage of fs.promises

This commit is contained in:
parent c49f931da4
commit bd66bb5cb7
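The whole change applies one pattern: synchronous fs calls (readFileSync, writeFileSync, readdirSync, statSync, unlinkSync, rmSync) are swapped for their fs.promises counterparts and awaited inside functions that become async. A minimal sketch of that pattern in plain JavaScript (loadJsonFile and filePath are hypothetical names used only for illustration, not part of the diff):

import fs from 'fs'

// Before: the synchronous call blocks the event loop while the file is read
// const data = JSON.parse(fs.readFileSync(filePath, 'utf8'))

// After: the promise-based API is awaited inside an async function
async function loadJsonFile(filePath) {
  const raw = await fs.promises.readFile(filePath, 'utf8')
  return JSON.parse(raw)
}

The hunks below repeat this mapping across the scripts and the server, and propagate async/await up through the callers (methods gain async, return types become Promise<...>, and call sites gain await).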
@@ -1,9 +1,123 @@
{
"endpoints": [
{
"method": "POST",
"route": "/api/action/news/github_trends/run",
"params": [
"number",
"daterange"
],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/news/product_hunt_trends/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/create_list",
"params": [
"list"
],
"entitiesType": "trim"
},
{
"method": "GET",
"route": "/api/action/productivity/todo_list/view_lists",
"params": []
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/view_list",
"params": [
"list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/rename_list",
"params": [
"old_list",
"new_list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/delete_list",
"params": [
"list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/add_todos",
"params": [
"todos",
"list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/complete_todos",
"params": [
"todos",
"list"
],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/uncheck_todos",
"params": [
"todos",
"list"
],
"entitiesType": "trim"
},
{
"method": "GET",
"route": "/api/action/social_communication/mbti/setup",
"params": []
},
{
"method": "GET",
"route": "/api/action/social_communication/mbti/quiz",
"params": []
},
{
"method": "GET",
"route": "/api/action/utilities/have_i_been_pwned/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/utilities/is_it_down/run",
"params": [
"url"
],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/utilities/speed_test/run",
"params": []
},
{
"method": "GET",
"route": "/api/action/utilities/youtube_downloader/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/games/akinator/choose_thematic",
"params": ["thematic"],
"params": [
"thematic"
],
"entitiesType": "trim"
},
{
@@ -44,7 +158,9 @@
{
"method": "POST",
"route": "/api/action/games/rochambeau/play",
"params": ["handsign"],
"params": [
"handsign"
],
"entitiesType": "trim"
},
{
@@ -116,95 +232,6 @@
"method": "GET",
"route": "/api/action/leon/welcome/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/news/github_trends/run",
"params": ["number", "daterange"],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/news/product_hunt_trends/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/create_list",
"params": ["list"],
"entitiesType": "trim"
},
{
"method": "GET",
"route": "/api/action/productivity/todo_list/view_lists",
"params": []
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/view_list",
"params": ["list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/rename_list",
"params": ["old_list", "new_list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/delete_list",
"params": ["list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/add_todos",
"params": ["todos", "list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/complete_todos",
"params": ["todos", "list"],
"entitiesType": "trim"
},
{
"method": "POST",
"route": "/api/action/productivity/todo_list/uncheck_todos",
"params": ["todos", "list"],
"entitiesType": "trim"
},
{
"method": "GET",
"route": "/api/action/social_communication/mbti/setup",
"params": []
},
{
"method": "GET",
"route": "/api/action/social_communication/mbti/quiz",
"params": []
},
{
"method": "GET",
"route": "/api/action/utilities/have_i_been_pwned/run",
"params": []
},
{
"method": "POST",
"route": "/api/action/utilities/is_it_down/run",
"params": ["url"],
"entitiesType": "builtIn"
},
{
"method": "GET",
"route": "/api/action/utilities/speed_test/run",
"params": []
},
{
"method": "GET",
"route": "/api/action/utilities/youtube_downloader/run",
"params": []
}
]
}
@@ -312,7 +312,8 @@ dotenv.config()

if (
!fs.existsSync(globalResolversNlpModelPath) ||
!Object.keys(fs.readFileSync(globalResolversNlpModelPath)).length
!Object.keys(await fs.promises.readFile(globalResolversNlpModelPath))
.length
) {
const state = 'Global resolvers NLP model not found or broken'

@@ -340,7 +341,8 @@ dotenv.config()

if (
!fs.existsSync(skillsResolversNlpModelPath) ||
!Object.keys(fs.readFileSync(skillsResolversNlpModelPath)).length
!Object.keys(await fs.promises.readFile(skillsResolversNlpModelPath))
.length
) {
const state = 'Skills resolvers NLP model not found or broken'

@@ -368,7 +370,7 @@ dotenv.config()

if (
!fs.existsSync(mainNlpModelPath) ||
!Object.keys(fs.readFileSync(mainNlpModelPath)).length
!Object.keys(await fs.promises.readFile(mainNlpModelPath)).length
) {
const state = 'Main NLP model not found or broken'

@@ -395,7 +397,7 @@ dotenv.config()
LogHelper.info('Amazon Polly TTS')

try {
const json = JSON.parse(fs.readFileSync(amazonPath))
const json = JSON.parse(await fs.promises.readFile(amazonPath))
if (
json.credentials.accessKeyId === '' ||
json.credentials.secretAccessKey === ''
@@ -413,7 +415,7 @@ dotenv.config()
LogHelper.info('Google Cloud TTS/STT')

try {
const json = JSON.parse(fs.readFileSync(googleCloudPath))
const json = JSON.parse(await fs.promises.readFile(googleCloudPath))
const results = []
Object.keys(json).forEach((item) => {
if (json[item] === '') results.push(false)
@@ -434,7 +436,7 @@ dotenv.config()
LogHelper.info('Watson TTS')

try {
const json = JSON.parse(fs.readFileSync(watsonTtsPath))
const json = JSON.parse(await fs.promises.readFile(watsonTtsPath))
const results = []
Object.keys(json).forEach((item) => {
if (json[item] === '') results.push(false)
@@ -464,7 +466,7 @@ dotenv.config()
LogHelper.info('Watson STT')

try {
const json = JSON.parse(fs.readFileSync(watsonSttPath))
const json = JSON.parse(await fs.promises.readFile(watsonSttPath))
const results = []
Object.keys(json).forEach((item) => {
if (json[item] === '') results.push(false)
@@ -22,13 +22,13 @@ export default () =>
try {
// TODO: handle case where the memory folder contain multiple DB nodes
const dbFolder = join(currentSkill.path, 'memory')
const dbTestFiles = fs
.readdirSync(dbFolder)
.filter((entity) => entity.indexOf('.spec.json') !== -1)
const dbTestFiles = (await fs.promises.readdir(dbFolder)).filter(
(entity) => entity.indexOf('.spec.json') !== -1
)

if (dbTestFiles.length > 0) {
LogHelper.info(`Deleting ${dbTestFiles[0]}...`)
fs.unlinkSync(join(dbFolder, dbTestFiles[0]))
await fs.promises.unlink(join(dbFolder, dbTestFiles[0]))
LogHelper.success(`${dbTestFiles[0]} deleted`)
}
} catch (e) {
@@ -6,13 +6,17 @@ import { LogHelper } from '@/helpers/log-helper'
* This script is executed after "git commit" or "git merge" (Git hook https://git-scm.com/docs/githooks#_commit_msg)
* it ensures the authenticity of commit messages
*/
LogHelper.info('Checking commit message...')
;(async () => {
LogHelper.info('Checking commit message...')

const commitEditMsgFile = '.git/COMMIT_EDITMSG'
const commitEditMsgFile = '.git/COMMIT_EDITMSG'

if (fs.existsSync(commitEditMsgFile)) {
if (fs.existsSync(commitEditMsgFile)) {
try {
const commitMessage = fs.readFileSync(commitEditMsgFile, 'utf8')
const commitMessage = await fs.promises.readFile(
commitEditMsgFile,
'utf8'
)
const regex =
'(build|BREAKING|chore|ci|docs|feat|fix|perf|refactor|style|test)(\\((web app|docker|server|hotword|tcp server|python bridge|skill\\/([\\w-]+)))?\\)?: .{1,50}'

@@ -26,4 +30,5 @@ if (fs.existsSync(commitEditMsgFile)) {
LogHelper.error(e.message)
process.exit(1)
}
}
}
})()
@@ -23,7 +23,7 @@ const generateHttpApiKey = () =>
const str = StringHelper.random(11)
const dotEnvPath = path.join(process.cwd(), '.env')
const envVarKey = 'LEON_HTTP_API_KEY'
let content = fs.readFileSync(dotEnvPath, 'utf8')
let content = await fs.promises.readFile(dotEnvPath, 'utf8')

shasum.update(str)
const sha1 = shasum.digest('hex')
@@ -39,7 +39,7 @@ const generateHttpApiKey = () =>

content = lines.join('\n')

fs.writeFileSync(dotEnvPath, content)
await fs.promises.writeFile(dotEnvPath, content)
LogHelper.success('HTTP API key generated')

resolve()
@@ -39,7 +39,9 @@ export default () =>

// Check if a new routing generation is necessary
if (fs.existsSync(outputFilePath)) {
const mtimeEndpoints = fs.statSync(outputFilePath).mtime.getTime()
const mtimeEndpoints = (
await fs.promises.stat(outputFilePath)
).mtime.getTime()

let i = 0
for (const currentDomain of skillDomains.values()) {
@@ -49,7 +51,7 @@ export default () =>
for (let j = 0; j < skillKeys.length; j += 1) {
const skillFriendlyName = skillKeys[j]
const currentSkill = currentDomain.skills[skillFriendlyName]
const fileInfo = fs.statSync(
const fileInfo = await fs.promises.stat(
path.join(currentSkill.path, 'config', `${lang}.json`)
)
const mtime = fileInfo.mtime.getTime()
@@ -91,7 +93,7 @@ export default () =>
`${lang}.json`
)
const { actions } = JSON.parse(
fs.readFileSync(configFilePath, 'utf8')
await fs.promises.readFile(configFilePath, 'utf8')
)
const actionsKeys = Object.keys(actions)

@@ -145,7 +147,10 @@ export default () =>

LogHelper.info(`Writing ${outputFile} file...`)
try {
fs.writeFileSync(outputFilePath, JSON.stringify(finalObj, null, 2))
await fs.promises.writeFile(
outputFilePath,
JSON.stringify(finalObj, null, 2)
)
LogHelper.success(`${outputFile} file generated`)
resolve()
} catch (e) {
@@ -35,9 +35,12 @@ export default (version) =>

const repoUrl = sh.stdout.substr(0, sh.stdout.lastIndexOf('.git'))
const previousTag = sh.stdout.substr(sh.stdout.indexOf('\n') + 1).trim()
const changelogData = fs.readFileSync(changelog, 'utf8')
const changelogData = await fs.promises.readFile(changelog, 'utf8')
const compareUrl = `${repoUrl}/compare/${previousTag}...v${version}`
let tmpData = fs.readFileSync(`scripts/tmp/${tmpChangelog}`, 'utf8')
let tmpData = await fs.promises.readFile(
`scripts/tmp/${tmpChangelog}`,
'utf8'
)

LogHelper.success(`Remote origin URL gotten: ${repoUrl}.git`)
LogHelper.success(`Previous tag gotten: ${previousTag}`)
@@ -46,14 +49,14 @@ export default (version) =>
tmpData = tmpData.replace(version, `[${version}](${compareUrl})`)
}

fs.writeFile(changelog, `${tmpData}${changelogData}`, (err) => {
if (err) LogHelper.error(`Failed to write into file: ${err}`)
else {
fs.unlinkSync(`scripts/tmp/${tmpChangelog}`)
try {
await fs.promises.writeFile(changelog, `${tmpData}${changelogData}`)
await fs.promises.unlink(`scripts/tmp/${tmpChangelog}`)
LogHelper.success(`${changelog} generated`)
resolve()
} catch (error) {
LogHelper.error(`Failed to write into file: ${error}`)
}
})
} catch (e) {
LogHelper.error(`Error during git commands: ${e}`)
reject(e)
@@ -7,20 +7,20 @@ import { LogHelper } from '@/helpers/log-helper'
* Set up Leon's core configuration
*/
export default () =>
new Promise((resolve) => {
new Promise(async (resolve) => {
LogHelper.info('Configuring core...')

const dir = 'core/config'
const list = (dir) => {
const entities = fs.readdirSync(dir)
const list = async (dir) => {
const entities = await fs.promises.readdir(dir)

// Browse core config entities
for (let i = 0; i < entities.length; i += 1) {
const file = `${entities[i].replace('.sample.json', '.json')}`
// Recursive if the entity is a directory
const way = path.join(dir, entities[i])
if (fs.statSync(way).isDirectory()) {
list(way)
if ((await fs.promises.stat(way)).isDirectory()) {
await list(way)
} else if (
entities[i].indexOf('.sample.json') !== -1 &&
!fs.existsSync(`${dir}/${file}`)
@@ -40,6 +40,6 @@ export default () =>
}
}

list(dir)
await list(dir)
resolve()
})
@@ -136,7 +136,7 @@ SPACY_MODELS.set('fr', {
// Delete .venv directory to reset the development environment
if (hasDotVenv) {
LogHelper.info(`Deleting ${dotVenvPath}...`)
fs.rmSync(dotVenvPath, { recursive: true, force: true })
await fs.promises.rm(dotVenvPath, { recursive: true, force: true })
LogHelper.success(`${dotVenvPath} deleted`)
}

@@ -211,7 +211,7 @@ SPACY_MODELS.set('fr', {
await installPythonPackages()
} else {
if (fs.existsSync(dotProjectPath)) {
const dotProjectMtime = fs.statSync(dotProjectPath).mtime
const dotProjectMtime = (await fs.promises.stat(dotProjectPath)).mtime

// Check if Python deps tree has been modified since the initial setup
if (pipfileMtime > dotProjectMtime) {
@@ -31,10 +31,10 @@ export default () =>
// Check if the config and config.sample file exist
if (fs.existsSync(configFile) && fs.existsSync(configSampleFile)) {
const config = JSON.parse(
fs.readFileSync(configFile, 'utf8')
await fs.promises.readFile(configFile, 'utf8')
)?.configurations
const configSample = JSON.parse(
fs.readFileSync(configSampleFile, 'utf8')
await fs.promises.readFile(configSampleFile, 'utf8')
)?.configurations
const configKeys = Object.keys(config)
const configSampleKeys = Object.keys(configSample)
@@ -8,7 +8,7 @@ import { LogHelper } from '@/helpers/log-helper'
* Add global entities annotations (@...)
*/
export default (lang, nlp) =>
new Promise((resolve) => {
new Promise(async (resolve) => {
LogHelper.title('Global entities training')

const globalEntitiesPath = path.join(
@@ -18,7 +18,7 @@ export default (lang, nlp) =>
lang,
'global-entities'
)
const globalEntityFiles = fs.readdirSync(globalEntitiesPath)
const globalEntityFiles = await fs.promises.readdir(globalEntitiesPath)
const newEntitiesObj = {}

for (let i = 0; i < globalEntityFiles.length; i += 1) {
@@ -28,7 +28,9 @@ export default (lang, nlp) =>
globalEntitiesPath,
globalEntityFileName
)
const { options } = JSON.parse(fs.readFileSync(globalEntityPath, 'utf8'))
const { options } = JSON.parse(
await fs.promises.readFile(globalEntityPath, 'utf8')
)
const optionKeys = Object.keys(options)
const optionsObj = {}
@@ -35,7 +35,7 @@ export default (lang, nlp) =>
)

if (fs.existsSync(configFilePath)) {
const { actions, variables } = SkillDomainHelper.getSkillConfig(
const { actions, variables } = await SkillDomainHelper.getSkillConfig(
configFilePath,
lang
)
@@ -9,7 +9,7 @@ import { LogHelper } from '@/helpers/log-helper'
* Train global resolvers
*/
export default (lang, nlp) =>
new Promise((resolve) => {
new Promise(async (resolve) => {
LogHelper.title('Global resolvers training')

const resolversPath = path.join(
@@ -19,13 +19,13 @@ export default (lang, nlp) =>
lang,
'global-resolvers'
)
const resolverFiles = fs.readdirSync(resolversPath)
const resolverFiles = await fs.promises.readdir(resolversPath)

for (let i = 0; i < resolverFiles.length; i += 1) {
const resolverFileName = resolverFiles[i]
const resolverPath = path.join(resolversPath, resolverFileName)
const { name: resolverName, intents: resolverIntents } = JSON.parse(
fs.readFileSync(resolverPath, 'utf8')
await fs.promises.readFile(resolverPath, 'utf8')
)
const intentKeys = Object.keys(resolverIntents)
@@ -27,7 +27,7 @@ export default (lang, nlp) =>
)

if (fs.existsSync(configFilePath)) {
const { resolvers } = SkillDomainHelper.getSkillConfig(
const { resolvers } = await SkillDomainHelper.getSkillConfig(
configFilePath,
lang
)
@@ -256,11 +256,11 @@ export default class Brain {
* 2. Edit: server/src/intent-object.sample.json
* 3. Run: npm run python-bridge
*/
private executeLogicActionSkill(
private async executeLogicActionSkill(
nluResult: NLUResult,
utteranceId: string,
intentObjectPath: string
): void {
): Promise<void> {
// Ensure the process is empty (to be able to execute other processes outside of Brain)
if (!this.skillProcess) {
const slots: IntentObject['slots'] = {}
@@ -278,7 +278,10 @@ export default class Brain {
)

try {
fs.writeFileSync(intentObjectPath, JSON.stringify(intentObject))
await fs.promises.writeFile(
intentObjectPath,
JSON.stringify(intentObject)
)
this.skillProcess = spawn(
`${PYTHON_BRIDGE_BIN_PATH} "${intentObjectPath}"`,
{ shell: true }
@@ -319,7 +322,7 @@ export default class Brain {
skillConfigPath,
classification: { action: actionName }
} = nluResult
const { actions } = SkillDomainHelper.getSkillConfig(
const { actions } = await SkillDomainHelper.getSkillConfig(
skillConfigPath,
this._lang
)
@@ -341,11 +344,9 @@ export default class Brain {
const domainName = nluResult.classification.domain
const skillName = nluResult.classification.skill
const { name: domainFriendlyName } =
SkillDomainHelper.getSkillDomainInfo(domainName)
const { name: skillFriendlyName } = SkillDomainHelper.getSkillInfo(
domainName,
skillName
)
await SkillDomainHelper.getSkillDomainInfo(domainName)
const { name: skillFriendlyName } =
await SkillDomainHelper.getSkillInfo(domainName, skillName)

this.domainFriendlyName = domainFriendlyName
this.skillFriendlyName = skillFriendlyName
@@ -464,7 +465,7 @@ export default class Brain {
this._lang + '.json'
)
const { actions, entities: skillConfigEntities } =
SkillDomainHelper.getSkillConfig(configFilePath, this._lang)
await SkillDomainHelper.getSkillConfig(configFilePath, this._lang)
const utteranceHasEntities = nluResult.entities.length > 0
const { answers: rawAnswers } = nluResult
let answers = rawAnswers
@@ -7,15 +7,17 @@ import { LogHelper } from '@/helpers/log-helper'
import { StringHelper } from '@/helpers/string-helper'

const getDownloads = async (fastify, options) => {
fastify.get(`/api/${options.apiVersion}/downloads`, (request, reply) => {
fastify.get(
`/api/${options.apiVersion}/downloads`,
async (request, reply) => {
LogHelper.title('GET /downloads')

const clean = (dir, files) => {
const clean = async (dir, files) => {
LogHelper.info('Cleaning skill download directory...')
for (let i = 0; i < files.length; i += 1) {
fs.unlinkSync(`${dir}/${files[i]}`)
await fs.promises.unlink(`${dir}/${files[i]}`)
}
fs.rmdirSync(dir)
await fs.promises.rmdir(dir)
LogHelper.success('Downloads directory cleaned')
}
let message = ''
@@ -40,36 +42,25 @@ const getDownloads = async (fastify, options) => {
const downloadsDir = `${dlDomainDir}/${request.query.skill}`

LogHelper.info('Reading downloads directory...')
fs.readdir(downloadsDir, (err, files) => {
if (err && err.code === 'ENOENT') {
message = 'There is no content to download for this skill.'
LogHelper.error(message)
reply.code(404).send({
success: false,
status: 404,
code: 'skill_dir_not_found',
message
})
} else {
if (err) LogHelper.error(err)

try {
const files = await fs.promises.readdir(downloadsDir)
// Download the file if there is only one
if (files.length === 1) {
LogHelper.info(`${files[0]} is downloading...`)
reply.download(`${downloadsDir}/${files[0]}`)
LogHelper.success(`${files[0]} downloaded`)
clean(downloadsDir, files)
await clean(downloadsDir, files)
} else {
LogHelper.info('Deleting previous archives...')
const zipSlug = `leon-${request.query.domain}-${request.query.skill}`
const domainsFiles = fs.readdirSync(dlDomainDir)
const domainsFiles = await fs.promises.readdir(dlDomainDir)

for (let i = 0; i < domainsFiles.length; i += 1) {
if (
domainsFiles[i].indexOf('.zip') !== -1 &&
domainsFiles[i].indexOf(zipSlug) !== -1
) {
fs.unlinkSync(`${dlDomainDir}/${domainsFiles[i]}`)
await fs.promises.unlink(`${dlDomainDir}/${domainsFiles[i]}`)
LogHelper.success(`${domainsFiles[i]} archive deleted`)
}
}
@@ -83,12 +74,12 @@ const getDownloads = async (fastify, options) => {
// When the archive is ready
output.on('close', () => {
LogHelper.info(`${zipName} is downloading...`)
reply.download(zipFile, (err) => {
reply.download(zipFile, async (err) => {
if (err) LogHelper.error(err)

LogHelper.success(`${zipName} downloaded`)

clean(downloadsDir, files)
await clean(downloadsDir, files)
})
})
archive.on('error', (err) => {
@@ -106,8 +97,19 @@ const getDownloads = async (fastify, options) => {
LogHelper.info('Finalizing...')
archive.finalize()
}
}
} catch (error) {
if (error.code === 'ENOENT') {
message = 'There is no content to download for this skill.'
LogHelper.error(message)
reply.code(404).send({
success: false,
status: 404,
code: 'skill_dir_not_found',
message
})
}
LogHelper.error(message)
}
} else {
message = 'This skill does not exist.'
LogHelper.error(message)
@@ -128,7 +130,8 @@ const getDownloads = async (fastify, options) => {
message
})
}
})
}
)
}

export default getDownloads
@@ -65,7 +65,9 @@ export default class Conversation {
/**
* Activate context according to the triggered action
*/
public set activeContext(nluContext: ConversationContext) {
public async setActiveContext(
nluContext: ConversationContext
): Promise<void> {
const {
slots,
isInActionLoop,
@@ -79,7 +81,10 @@ export default class Conversation {
const slotKeys = Object.keys(slots)
const [skillName] = intent.split('.')
const newContextName = `${domain}.${skillName}`
const { actions } = SkillDomainHelper.getSkillConfig(skillConfigPath, lang)
const { actions } = await SkillDomainHelper.getSkillConfig(
skillConfigPath,
lang
)
// Grab next action from the NLU data file
const { next_action: nextAction } = actions[actionName] as {
next_action: string
@@ -121,7 +126,7 @@ export default class Conversation {
this._activeContext.name &&
this._activeContext.name !== newContextName
) {
this.cleanActiveContext()
await this.cleanActiveContext()
}

/**
@@ -261,12 +266,12 @@ export default class Conversation {
/**
* Clean up active context
*/
public cleanActiveContext(): void {
public async cleanActiveContext(): Promise<void> {
LogHelper.title('Conversation')
LogHelper.info('Clean active context')

this.pushToPreviousContextsStack()
this._activeContext = DEFAULT_ACTIVE_CONTEXT
await this.setActiveContext(DEFAULT_ACTIVE_CONTEXT)
}

/**
@@ -48,7 +48,7 @@ export class ActionLoop {
NLU.nluResult
)

const { actions, resolvers } = SkillDomainHelper.getSkillConfig(
const { actions, resolvers } = await SkillDomainHelper.getSkillConfig(
skillConfigPath,
BRAIN.lang
)
@@ -72,10 +72,10 @@ export class ActionLoop {
const result = await nlpObjs[expectedItemType].process(utterance)
const { intent } = result

const resolveResolvers = (
const resolveResolvers = async (
resolver: string,
intent: string
): [ResolveResolversResult] => {
): Promise<[ResolveResolversResult]> => {
const resolversPath = join(
process.cwd(),
'core',
@@ -87,7 +87,10 @@ export class ActionLoop {
const resolvedIntents = !intent.includes('resolver.global')
? resolvers && resolvers[resolver]
: JSON.parse(
fs.readFileSync(join(resolversPath, `${resolver}.json`), 'utf8')
await fs.promises.readFile(
join(resolversPath, `${resolver}.json`),
'utf8'
)
)

// E.g. resolver.global.denial -> denial
@@ -109,7 +112,10 @@ export class ActionLoop {
) {
LogHelper.title('NLU')
LogHelper.success('Resolvers resolved:')
NLU.nluResult.resolvers = resolveResolvers(expectedItemName, intent)
NLU.nluResult.resolvers = await resolveResolvers(
expectedItemName,
intent
)
NLU.nluResult.resolvers.forEach((resolver) =>
LogHelper.success(`${intent}: ${JSON.stringify(resolver)}`)
)
@@ -120,7 +126,7 @@ export class ActionLoop {
// Ensure expected items are in the utterance, otherwise clean context and reprocess
if (!hasMatchingEntity && !hasMatchingResolver) {
BRAIN.talk(`${BRAIN.wernicke('random_context_out_of_topic')}.`)
NLU.conversation.cleanActiveContext()
await NLU.conversation.cleanActiveContext()
await NLU.process(utterance)
return null
}
@@ -131,7 +137,7 @@ export class ActionLoop {
if (processedData.core?.restart === true) {
const { originalUtterance } = NLU.conversation.activeContext

NLU.conversation.cleanActiveContext()
await NLU.conversation.cleanActiveContext()

if (originalUtterance !== null) {
await NLU.process(originalUtterance)
@@ -148,7 +154,7 @@ export class ActionLoop {
!processedData.action?.next_action &&
processedData.core?.isInActionLoop === false
) {
NLU.conversation.cleanActiveContext()
await NLU.conversation.cleanActiveContext()
return null
}
@@ -78,7 +78,7 @@ export default class NER {
const utterance = `${StringHelper.removeEndPunctuation(
nluResult.utterance
)} `
const { actions } = SkillDomainHelper.getSkillConfig(
const { actions } = await SkillDomainHelper.getSkillConfig(
skillConfigPath,
lang
)
@@ -252,9 +252,9 @@ export default class NLU {

const newContextName = `${this.nluResult.classification.domain}.${skillName}`
if (this.conversation.activeContext.name !== newContextName) {
this.conversation.cleanActiveContext()
await this.conversation.cleanActiveContext()
}
this.conversation.activeContext = {
await this.conversation.setActiveContext({
...DEFAULT_ACTIVE_CONTEXT,
lang: BRAIN.lang,
slots: {},
@@ -265,7 +265,7 @@ export default class NLU {
domain: this.nluResult.classification.domain,
intent,
entities: this.nluResult.entities
}
})
// Pass current utterance entities to the NLU result object
this.nluResult.currentEntities =
this.conversation.activeContext.currentEntities
@@ -277,8 +277,8 @@ export default class NLU {

// Prepare next action if there is one queuing
if (processedData.nextAction) {
this.conversation.cleanActiveContext()
this.conversation.activeContext = {
await this.conversation.cleanActiveContext()
await this.conversation.setActiveContext({
...DEFAULT_ACTIVE_CONTEXT,
lang: BRAIN.lang,
slots: {},
@@ -289,7 +289,7 @@ export default class NLU {
domain: processedData.classification?.domain || '',
intent: `${processedData.classification?.skill}.${processedData.action?.next_action}`,
entities: []
}
})
}

const processingTimeEnd = Date.now()
@@ -28,7 +28,7 @@ export class SlotFilling {
if (processedData && Object.keys(processedData).length > 0) {
// Set new context with the next action if there is one
if (processedData.action?.next_action) {
NLU.conversation.activeContext = {
await NLU.conversation.setActiveContext({
...DEFAULT_ACTIVE_CONTEXT,
lang: BRAIN.lang,
slots: processedData.slots || {},
@@ -39,7 +39,7 @@ export class SlotFilling {
domain: processedData.classification?.domain || '',
intent: `${processedData.classification?.skill}.${processedData.action.next_action}`,
entities: []
}
})
}
}

@@ -124,12 +124,12 @@ export class SlotFilling {
}
}

NLU.conversation.cleanActiveContext()
await NLU.conversation.cleanActiveContext()

return BRAIN.execute(NLU.nluResult)
}

NLU.conversation.cleanActiveContext()
await NLU.conversation.cleanActiveContext()
return null
}

@@ -145,7 +145,7 @@ export class SlotFilling {
const hasMandatorySlots = Object.keys(slots)?.length > 0

if (hasMandatorySlots) {
NLU.conversation.activeContext = {
await NLU.conversation.setActiveContext({
...DEFAULT_ACTIVE_CONTEXT,
lang: BRAIN.lang,
slots,
@@ -156,12 +156,12 @@ export class SlotFilling {
domain: NLU.nluResult.classification.domain,
intent,
entities: NLU.nluResult.entities
}
})

const notFilledSlot = NLU.conversation.getNotFilledSlot()
// Loop for questions if a slot hasn't been filled
if (notFilledSlot) {
const { actions } = SkillDomainHelper.getSkillConfig(
const { actions } = await SkillDomainHelper.getSkillConfig(
NLU.nluResult.skillConfigPath,
BRAIN.lang
)
@@ -60,12 +60,12 @@ class Synchronizer {
* Google Drive synchronization method
*/
googleDrive() {
return new Promise((resolve, reject) => {
return new Promise(async (resolve, reject) => {
const driveFolderName = `leon-${this.classification.domain}-${this.classification.skill}`
const folderMimeType = 'application/vnd.google-apps.folder'
const entities = fs.readdirSync(this.downloadDir)
const entities = await fs.promises.readdir(this.downloadDir)
const key = JSON.parse(
fs.readFileSync(
await fs.promises.readFile(
path.join(
process.cwd(),
'core/config/synchronizer/google-drive.json'
@@ -110,7 +110,7 @@ export default class TTS {
)
} else {
const { audioFilePath, duration } = result
const bitmap = fs.readFileSync(audioFilePath)
const bitmap = await fs.promises.readFile(audioFilePath)

SOCKET_SERVER.socket.emit(
'audio-forwarded',
@@ -37,23 +37,28 @@ export class SkillDomainHelper {
const skillDomains = new Map<string, SkillDomain>()

await Promise.all(
fs.readdirSync(DOMAINS_DIR).map(async (entity) => {
(
await fs.promises.readdir(DOMAINS_DIR)
).map(async (entity) => {
const domainPath = path.join(DOMAINS_DIR, entity)

if (fs.statSync(domainPath).isDirectory()) {
if ((await fs.promises.stat(domainPath)).isDirectory()) {
const skills: SkillDomain['skills'] = {}
const { name: domainName } = (await import(
path.join(domainPath, 'domain.json')
)) as DomainSchema
const skillFolders = fs.readdirSync(domainPath)
const skillFolders = await fs.promises.readdir(domainPath)

for (let i = 0; i < skillFolders.length; i += 1) {
const skillAliasName = skillFolders[i] as string
const skillPath = path.join(domainPath, skillAliasName)

if (fs.statSync(skillPath).isDirectory()) {
if ((await fs.promises.stat(skillPath)).isDirectory()) {
const { name: skillName, bridge: skillBridge } = JSON.parse(
fs.readFileSync(path.join(skillPath, 'skill.json'), 'utf8')
await fs.promises.readFile(
path.join(skillPath, 'skill.json'),
'utf8'
)
) as SkillSchema

skills[skillName] = {
@@ -83,9 +88,14 @@ export class SkillDomainHelper {
* Get information of a specific domain
* @param domain Domain to get info from
*/
public static getSkillDomainInfo(domain: SkillDomain['name']): DomainSchema {
public static async getSkillDomainInfo(
domain: SkillDomain['name']
): Promise<DomainSchema> {
return JSON.parse(
fs.readFileSync(path.join(DOMAINS_DIR, domain, 'domain.json'), 'utf8')
await fs.promises.readFile(
path.join(DOMAINS_DIR, domain, 'domain.json'),
'utf8'
)
)
}

@@ -94,12 +104,12 @@ export class SkillDomainHelper {
* @param domain Domain where the skill belongs
* @param skill Skill to get info from
*/
public static getSkillInfo(
public static async getSkillInfo(
domain: SkillDomain['name'],
skill: SkillSchema['name']
): SkillSchema {
): Promise<SkillSchema> {
return JSON.parse(
fs.readFileSync(
await fs.promises.readFile(
path.join(DOMAINS_DIR, domain, skill, 'skill.json'),
'utf8'
)
@@ -111,13 +121,13 @@ export class SkillDomainHelper {
* @param configFilePath Path of the skill config file
* @param lang Language short code
*/
public static getSkillConfig(
public static async getSkillConfig(
configFilePath: string,
lang: ShortLanguageCode
): SkillConfigWithGlobalEntities {
): Promise<SkillConfigWithGlobalEntities> {
const sharedDataPath = path.join(process.cwd(), 'core', 'data', lang)
const configData = JSON.parse(
fs.readFileSync(configFilePath, 'utf8')
await fs.promises.readFile(configFilePath, 'utf8')
) as SkillConfigSchema
const result: SkillConfigWithGlobalEntities = {
...configData,
@@ -129,13 +139,14 @@ export class SkillDomainHelper {
if (entities) {
const entitiesKeys = Object.keys(entities)

entitiesKeys.forEach((entity) => {
await Promise.all(
entitiesKeys.map(async (entity) => {
if (typeof entities[entity] === 'string') {
const entityFilePath = path.join(
sharedDataPath,
entities[entity] as string
)
const entityRawData = fs.readFileSync(entityFilePath, {
const entityRawData = await fs.promises.readFile(entityFilePath, {
encoding: 'utf8'
})

@@ -144,6 +155,7 @@ export class SkillDomainHelper {
) as GlobalEntitySchema
}
})
)

configData.entities = entities
}