mirror of https://github.com/leon-ai/leon.git synced 2024-12-01 03:15:58 +03:00

Merge branch 'develop' into speedtest_package

Florian / Fkeloks 2019-03-17 16:41:59 +01:00 committed by GitHub
commit ae1926ea90
46 changed files with 1045 additions and 277 deletions

View File

@ -1,2 +1,5 @@
node_modules/
bridges/python/.venv/*
.env
!bridges/python/.venv/.gitkeep

.gitignore
View File

@ -13,6 +13,7 @@ bin/flite/*
*.pyc
.DS_Store
*.sublime-workspace
npm-debug.log
debug.log
.env

View File

@ -3,8 +3,7 @@ WORKDIR /app
COPY . .
# Install system packages
RUN apk update --no-cache \
&& apk add --no-cache \
RUN apk add --no-cache \
ca-certificates \
build-base \
python3 \

View File

@ -3,7 +3,7 @@
import utils
from sys import argv, path
from json import dumps
from json import dumps, loads
from importlib import import_module
def main():
@ -11,13 +11,10 @@ def main():
path.append('.')
lang = argv[1]
package = argv[2]
module = argv[3]
string = argv[4]
m = import_module('packages.' + package + '.' + module)
queryobj = utils.getqueryobj()
m = import_module('packages.' + queryobj['package'] + '.' + queryobj['module'])
return getattr(m, module)(string)
return getattr(m, queryobj['module'])(queryobj['query'], queryobj['entities'])
if __name__ == '__main__':
main()
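
Under the new calling convention above, every package module is invoked as module(query, entities) instead of module(string). A minimal hypothetical module following this convention could look like the sketch below (the module name is illustrative; the entity shape matches the checker:isitdown changes further down):

#!/usr/bin/env python
# -*- coding:utf-8 -*-

import utils

def pingsites(string, entities):
    """Hypothetical module illustrating the new (query, entities) signature"""
    # URLs now come from the NLU entity extraction instead of utils.finddomains()
    urls = [item['resolution']['value'] for item in entities if item['entity'] == 'url']
    return utils.output('end', 'success', ', '.join(urls))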

View File

@ -2,7 +2,7 @@
# -*- coding:utf-8 -*-
from json import loads, dumps
from os import path
from os import path, environ
from pathlib import Path
from random import choice
from sys import argv, stdout
@ -14,10 +14,18 @@ import sqlite3
import requests
dirname = path.dirname(path.realpath(__file__))
lang = argv[1]
package = argv[2]
module = argv[3]
istring = argv[4]
queryobjectpath = argv[1]
serversrc = 'dist' if environ.get('LEON_NODE_ENV') == 'production' else 'src'
queryobjfile = open(queryobjectpath, 'r', encoding = 'utf8')
queryobj = loads(queryobjfile.read())
queryobjfile.close()
def getqueryobj():
"""Return query object"""
return queryobj
def translate(key, d = { }):
"""Pickup the language file according to the cmd arg
@ -25,11 +33,11 @@ def translate(key, d = { }):
output = ''
file = open(dirname + '/../../packages/' + package + '/' + 'data/answers/' + lang + '.json', 'r', encoding = 'utf8')
file = open(dirname + '/../../packages/' + queryobj['package'] + '/' + 'data/answers/' + queryobj['lang'] + '.json', 'r', encoding = 'utf8')
obj = loads(file.read())
file.close()
prop = obj[module][key]
prop = obj[queryobj['module']][key]
if isinstance(prop, list):
output = choice(prop)
else:
@ -48,10 +56,11 @@ def output(type, code, speech = ''):
"""Communicate with the Core"""
print(dumps({
'package': package,
'module': module,
'lang': lang,
'input': istring,
'package': queryobj['package'],
'module': queryobj['module'],
'lang': queryobj['lang'],
'input': queryobj['query'],
'entities': queryobj['entities'],
'output': {
'type': type,
'code': code,
@ -63,11 +72,6 @@ def output(type, code, speech = ''):
if (type == 'inter'):
stdout.flush()
def finddomains(string):
"""Find a domain name substring from a string"""
return findall('[a-z0-9\-]{,63}\.[a-z0-9\-\.]{2,191}', string.lower())
def http(method, url):
"""Send HTTP request with the Leon user agent"""
@ -79,11 +83,11 @@ def http(method, url):
def config(key):
"""Get a package configuration value"""
file = open(dirname + '/../../packages/' + package + '/config/config.json', 'r', encoding = 'utf8')
file = open(dirname + '/../../packages/' + queryobj['package'] + '/config/config.json', 'r', encoding = 'utf8')
obj = loads(file.read())
file.close()
return obj[module][key]
return obj[queryobj['module']][key]
def info():
"""Get information from the current query"""
@ -94,7 +98,7 @@ def createdldir():
"""Create the downloads folder of a current module"""
dldir = path.dirname(path.realpath(__file__)) + '/../../downloads/'
moduledldir = dldir + package + '/' + module
moduledldir = dldir + queryobj['package'] + '/' + queryobj['module']
Path(moduledldir).mkdir(parents = True, exist_ok = True)
@ -105,6 +109,6 @@ def db(dbtype = 'tinydb'):
for a specific package"""
if dbtype == 'tinydb':
db = TinyDB(dirname + '/../../packages/' + package + '/data/db/' + package + '.json')
db = TinyDB(dirname + '/../../packages/' + queryobj['package'] + '/data/db/' + queryobj['package'] + '.json')
return { 'db': db, 'query': Query, 'operations': operations }
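
The bridge now takes a single argument, the path to a query object JSON file, instead of the previous lang/package/module/query arguments. A rough sketch of a standalone run under this change, assuming it is executed from the project root (the file name and module values are only examples):

# Hypothetical standalone invocation of the Python bridge
import json
import subprocess

queryobj = {
    'lang': 'en',
    'package': 'leon',
    'module': 'randomnumber',
    'query': 'Give me a random number',
    'entities': []
}

with open('query-object.json', 'w', encoding='utf8') as f:
    json.dump(queryobj, f)

subprocess.run(
    'PIPENV_PIPFILE=bridges/python/Pipfile pipenv run python bridges/python/main.py query-object.json',
    shell=True
)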

View File

@ -2,7 +2,7 @@
"langs": {
"en-US": {
"short": "en",
"min_confidence": 0.8,
"min_confidence": 0.6,
"fallbacks": [
]
},

package-lock.json
File diff suppressed because it is too large

View File

@ -32,7 +32,7 @@
"preinstall": "node scripts/setup/preinstall.js",
"postinstall": "babel-node scripts/setup/setup.js",
"dev:app": "npm run build:app && babel-node scripts/app/dev-app.js",
"dev:server": "npm run train expressions && nodemon --watch server ./server/src/index.js --exec babel-node",
"dev:server": "npm run train expressions && nodemon --watch server ./server/src/index.js --ignore server/src/tmp/ --exec babel-node",
"wake": "cross-env LEON_SERVER_HOST=localhost LEON_SERVER_PORT=1337 node hotword/index.js",
"delete-dist:server": "shx rm -rf ./server/dist",
"build": "npm run lint && npm run build:app && npm run build:server",
@ -63,7 +63,7 @@
"fluent-ffmpeg": "^2.1.2",
"googleapis": "^25.0.0",
"moment-timezone": "^0.5.14",
"natural": "^0.2.1",
"node-nlp": "^2.4.2",
"node-wav": "0.0.2",
"socket.io": "^2.0.2",
"superagent": "^3.5.2",
@ -89,6 +89,7 @@
"husky": "^0.14.3",
"inquirer": "^5.1.0",
"jest": "^24.1.0",
"jest-canvas-mock": "^2.0.0-alpha.3",
"jest-extended": "^0.11.1",
"json": "^9.0.6",
"nodemon": "^1.18.9",

View File

@ -4,12 +4,16 @@
import requests
import utils
def isitdown(string):
def isitdown(string, entities):
"""Check if a website is down or not"""
domains = utils.finddomains(string)
domains = []
output = ''
for item in entities:
if item['entity'] == 'url':
domains.append(item['resolution']['value'].lower())
for i, domain in enumerate(domains):
state = 'up'
websitename = domain[:domain.find('.')].title()

View File

@ -3,7 +3,7 @@
describe('checker:isitdown', async () => {
test('detects invalid domain name', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Check if github is up')
await global.nlu.process('Check if github is up')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])
@ -13,7 +13,7 @@ describe('checker:isitdown', async () => {
test('detects down domain name', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Check if fakedomainnametotestleon.fr is up')
await global.nlu.process('Check if fakedomainnametotestleon.fr is up')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])
@ -24,7 +24,7 @@ describe('checker:isitdown', async () => {
test('detects up domain name', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Check if github.com is up')
await global.nlu.process('Check if github.com is up')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])
@ -35,7 +35,7 @@ describe('checker:isitdown', async () => {
test('detects up domain names', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Check if github.com and nodejs.org are up')
await global.nlu.process('Check if github.com and nodejs.org are up')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])

View File

@ -3,7 +3,7 @@
import utils
def bye(string):
def bye(string, entities):
"""Leon says good bye"""
return utils.output('end', 'good_bye', utils.translate('good_bye'))

View File

@ -5,7 +5,7 @@ import utils
from datetime import datetime
from random import randint
def greeting(string):
def greeting(string, entities):
"""Leon greets you"""
time = datetime.time(datetime.now())

View File

@ -3,7 +3,7 @@
import utils
def joke(string):
def joke(string, entities):
"""Leon says some jokes"""
return utils.output('end', 'jokes', utils.translate('jokes'))

View File

@ -3,7 +3,7 @@
import utils
def meaningoflife(string):
def meaningoflife(string, entities):
"""Leon says what's the meaning of life"""
return utils.output('end', 'meaning_of_life', utils.translate('meaning_of_life'))

View File

@ -3,7 +3,7 @@
import utils
def partnerassistant(string):
def partnerassistant(string, entities):
"""Leon tells you about other personal assistants"""
string = string.lower()

View File

@ -4,7 +4,7 @@
import utils
from random import randint
def randomnumber(string):
def randomnumber(string, entities):
"""Leon gives a random number"""
return utils.output('end', 'success', randint(0, 100))

View File

@ -3,7 +3,7 @@
describe('leon:bye', async () => {
test('says bye', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Bye bye')
await global.nlu.process('Bye bye')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])

View File

@ -3,7 +3,7 @@
describe('leon:greeting', async () => {
test('greets', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Hello')
await global.nlu.process('Hello')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])

View File

@ -3,7 +3,7 @@
describe('leon:joke', async () => {
test('tells a joke', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Tell me a joke')
await global.nlu.process('Tell me a joke')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])

View File

@ -3,7 +3,7 @@
describe('leon:meaningoflife', async () => {
test('says the meaning of life', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('What is the meaning of life?')
await global.nlu.process('What is the meaning of life?')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])

View File

@ -3,7 +3,7 @@
describe('leon:partnerassistant', async () => {
test('does not know this personal assistant', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Tell me about the personal assistant Louistiti')
await global.nlu.process('Tell me about the personal assistant Louistiti')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])
@ -13,7 +13,7 @@ describe('leon:partnerassistant', async () => {
test('talks about the personal assistant Alexa', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Tell me about the personal assistant Alexa')
await global.nlu.process('Tell me about the personal assistant Alexa')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])

View File

@ -3,7 +3,7 @@
describe('leon:randomnumber', async () => {
test('gives a random number between 0 and 100', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Give me a random number')
await global.nlu.process('Give me a random number')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])

View File

@ -3,7 +3,7 @@
describe('leon:welcome', async () => {
test('welcomes', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Thank you')
await global.nlu.process('Thank you')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])

View File

@ -3,7 +3,7 @@
describe('leon:whoami', async () => {
test('introduces himself', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Who are you?')
await global.nlu.process('Who are you?')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])

View File

@ -3,7 +3,7 @@
import utils
def welcome(string):
def welcome(string, entities):
"""Leon welcomes you"""
return utils.output('end', 'welcome', utils.translate('welcome'))

View File

@ -3,7 +3,7 @@
import utils
def whoami(string):
def whoami(string, entities):
"""Leon introduces himself"""
return utils.output('end', 'introduction', utils.translate('introduction'))

View File

@ -3,13 +3,13 @@
describe('videodownloader:youtube', async () => {
test('requests YouTube', async () => {
global.nlu.brain.execute = jest.fn()
global.nlu.process('Download new videos from YouTube')
await global.nlu.process('Download new videos from YouTube')
const [obj] = global.nlu.brain.execute.mock.calls
await global.brain.execute(obj[0])
expect(global.brain.interOutput.code).toBe('reaching_playlist')
expect([
await expect(global.brain.interOutput.code).toBe('reaching_playlist')
await expect([
'settings_error',
'request_error',
'nothing_to_download',

View File

@ -7,7 +7,7 @@ import utils
from time import time
from pytube import YouTube
def youtube(string):
def youtube(string, entities):
"""Download new videos from a YouTube playlist"""
db = utils.db()['db']

View File

@ -0,0 +1 @@
{"lang":"en","package":"leon","module":"randomnumber","query":"Give me a random number","entities":[]}

View File

@ -78,7 +78,7 @@ export default () => new Promise(async (resolve, reject) => {
// Module execution checking
try {
const p = await shell('pipenv run python bridges/python/main.py en leon randomnumber "Give me a random number"')
const p = await shell('pipenv run python bridges/python/main.py scripts/assets/query-object.json')
log.info(p.cmd)
log.success(`${p.stdout}\n`)
} catch (e) {

View File

@ -3,7 +3,7 @@ import { shell } from 'execa'
import log from '@/helpers/log'
/**
* update version number in files which need version number
* Update version number in files which need version number
*/
export default version => new Promise(async (resolve, reject) => {
log.info('Updating version...')

View File

@ -1,4 +1,4 @@
import { LogisticRegressionClassifier } from 'natural'
import { NlpManager } from 'node-nlp'
import dotenv from 'dotenv'
import fs from 'fs'
import path from 'path'
@ -16,7 +16,7 @@ dotenv.config()
* npm run train expressions
* npm run train expressions:en
*/
export default () => new Promise((resolve, reject) => {
export default () => new Promise(async (resolve, reject) => {
const { argv } = process
const packagesDir = 'packages'
const expressionsClassifier = 'server/src/data/expressions/classifier.json'
@ -31,12 +31,10 @@ export default () => new Promise((resolve, reject) => {
try {
if (type === 'expressions') {
let classifier = new LogisticRegressionClassifier()
let manager = new NlpManager({ languages: ['en'] })
if (lang !== 'en') {
// eslint-disable-next-line global-require, import/no-dynamic-require
const PorterStemmer = require(`../node_modules/natural/lib/natural/stemmers/porter_stemmer_${lang}`)
classifier = new LogisticRegressionClassifier(PorterStemmer)
manager = new NlpManager({ languages: lang })
}
const packages = fs.readdirSync(packagesDir)
.filter(entity =>
@ -52,14 +50,16 @@ export default () => new Promise((resolve, reject) => {
for (let j = 0; j < modules.length; j += 1) {
const exprs = expressions[modules[j]]
for (let k = 0; k < exprs.length; k += 1) {
classifier.addDocument(string.removeAccents(exprs[k]), `${packages[i]}:${modules[j]}`)
manager.addDocument(lang, exprs[k], `${packages[i]}:${modules[j]}`)
}
log.success(`"${string.ucfirst(modules[j])}" module expressions trained`)
}
}
classifier.save(expressionsClassifier, (err) => {
await manager.train()
fs.writeFile(expressionsClassifier, manager.export(true), (err) => {
if (err) {
log.error(`Failed to save the classifier: ${err}`)
reject()

View File

@ -34,6 +34,17 @@ class Brain {
}
}
/**
* Delete query object file
*/
static deleteQueryObjFile (queryObjectPath) {
try {
fs.unlinkSync(queryObjectPath)
} catch (e) {
log.error(`Failed to delete query object file: ${e}`)
}
}
/**
* Make Leon talk
*/
@ -82,95 +93,115 @@ class Brain {
*/
execute (obj) {
return new Promise((resolve, reject) => {
const queryId = `${Date.now()}-${string.random(4)}`
const queryObjectPath = `${__dirname}/../tmp/${queryId}.json`
// Ask to repeat if Leon is not sure about the request
if (obj.classification.confidence < langs[process.env.LEON_LANG].min_confidence) {
this.talk(`${this.wernicke('random_not_sure')}.`)
this.socket.emit('is-typing', false)
resolve()
}
} else {
// Ensure the process is empty (to be able to execute other processes outside of Brain)
if (Object.keys(this.process).length === 0) {
/**
* Execute a module in a standalone way (CLI):
*
* 1. Need to be at the root of the project
* 2. Edit: server/src/query-object.sample.json
* 3. Run: PIPENV_PIPFILE=bridges/python/Pipfile pipenv run
* python bridges/python/main.py server/src/query-object.sample.json
*/
const queryObj = {
id: queryId,
lang: langs[process.env.LEON_LANG].short,
package: obj.classification.package,
module: obj.classification.module,
query: obj.query,
entities: obj.entities
}
// Ensure the process is empty (to be able to execute other processes outside of Brain)
if (Object.keys(this.process).length === 0) {
/**
* Execute a module in a standalone way (CLI):
*
* 1. Need to be at the root of the project
* 2. PIPENV_PIPFILE=bridges/python/Pipfile pipenv run
* python bridges/python/main.py en leon whoami "Who are you?"
*/
this.process = spawn(`pipenv run python bridges/python/main.py ${langs[process.env.LEON_LANG].short} ${obj.classification.package} ${obj.classification.module} "${obj.query}"`, { shell: true })
}
try {
fs.writeFileSync(queryObjectPath, JSON.stringify(queryObj))
this.process = spawn(`pipenv run python bridges/python/main.py ${queryObjectPath}`, { shell: true })
} catch (e) {
log.error(`Failed to save query object: ${e}`)
}
}
const packageName = string.ucfirst(obj.classification.package)
const moduleName = string.ucfirst(obj.classification.module)
let output = ''
const packageName = string.ucfirst(obj.classification.package)
const moduleName = string.ucfirst(obj.classification.module)
let output = ''
// Read output
this.process.stdout.on('data', (data) => {
const obj = JSON.parse(data.toString())
// Read output
this.process.stdout.on('data', (data) => {
const obj = JSON.parse(data.toString())
if (typeof obj === 'object') {
if (obj.output.type === 'inter') {
log.title(`${packageName} package`)
log.info(data.toString())
if (typeof obj === 'object') {
if (obj.output.type === 'inter') {
log.title(`${packageName} package`)
log.info(data.toString())
this.interOutput = obj.output
this.talk(obj.output.speech.toString())
this.interOutput = obj.output
this.talk(obj.output.speech.toString())
} else {
output += data
}
} else {
output += data
/* istanbul ignore next */
reject({ type: 'warning', obj: new Error(`The ${moduleName} module of the ${packageName} package is not well configured. Check the configuration file.`) })
}
} else {
/* istanbul ignore next */
reject({ type: 'warning', obj: new Error(`The ${moduleName} module of the ${packageName} package is not well configured. Check the configuration file.`) })
}
})
})
// Handle error
this.process.stderr.on('data', (data) => {
this.talk(`${this.wernicke('random_package_module_errors', '',
{ '%module_name%': moduleName, '%package_name%': packageName })}!`)
this.socket.emit('is-typing', false)
// Handle error
this.process.stderr.on('data', (data) => {
this.talk(`${this.wernicke('random_package_module_errors', '',
{ '%module_name%': moduleName, '%package_name%': packageName })}!`)
Brain.deleteQueryObjFile(queryObjectPath)
this.socket.emit('is-typing', false)
log.title(packageName)
reject({ type: 'error', obj: data })
})
log.title(packageName)
reject({ type: 'error', obj: data })
})
// Catch the end of the module execution
this.process.stdout.on('end', () => {
log.title(`${packageName} package`)
log.info(output)
// Catch the end of the module execution
this.process.stdout.on('end', () => {
log.title(`${packageName} package`)
log.info(output)
this.finalOutput = output
this.finalOutput = output
// Check if there is an output (no module error)
if (this.finalOutput !== '') {
this.finalOutput = JSON.parse(this.finalOutput).output
this.talk(this.finalOutput.speech.toString())
// Check if there is an output (no module error)
if (this.finalOutput !== '') {
this.finalOutput = JSON.parse(this.finalOutput).output
this.talk(this.finalOutput.speech.toString())
/* istanbul ignore next */
// Synchronize the downloaded content if enabled
if (this.finalOutput.type === 'end' && this.finalOutput.options.synchronization && this.finalOutput.options.synchronization.enabled &&
this.finalOutput.options.synchronization.enabled === true) {
const sync = new Synchronizer(
this,
obj.classification,
this.finalOutput.options.synchronization
)
/* istanbul ignore next */
// Synchronize the downloaded content if enabled
if (this.finalOutput.type === 'end' && this.finalOutput.options.synchronization && this.finalOutput.options.synchronization.enabled &&
this.finalOutput.options.synchronization.enabled === true) {
const sync = new Synchronizer(
this,
obj.classification,
this.finalOutput.options.synchronization
)
// When the synchronization is finished
sync.synchronize((speech) => {
this.talk(speech)
})
// When the synchronization is finished
sync.synchronize((speech) => {
this.talk(speech)
})
}
}
}
this.socket.emit('is-typing', false)
resolve()
})
Brain.deleteQueryObjFile(queryObjectPath)
this.socket.emit('is-typing', false)
resolve()
})
// Reset the child process
this.process = { }
// Reset the child process
this.process = { }
}
})
}
}

View File

@ -1,6 +1,6 @@
'use strict'
import { LogisticRegressionClassifier } from 'natural'
import { NlpManager } from 'node-nlp'
import request from 'superagent'
import fs from 'fs'
@ -28,28 +28,31 @@ class Nlu {
log.title('NLU')
reject({ type: 'warning', obj: new Error('The expressions classifier does not exist, please run: npm run train expressions') })
} else {
LogisticRegressionClassifier.load(classifierFile, null, (err, classifier) => {
log.title('NLU')
log.title('NLU')
/* istanbul ignore if */
if (err) {
this.brain.talk(`${this.brain.wernicke('random_errors')}! ${this.brain.wernicke('errors', 'nlu', { '%error%': err.message })}.`)
this.brain.socket.emit('is-typing', false)
reject({ type: 'error', obj: err })
} else {
this.classifier = classifier
this.classifier.train()
log.success('Classifier loaded')
resolve()
}
})
try {
const data = fs.readFileSync(classifierFile, 'utf8')
const manager = new NlpManager()
manager.import(data)
this.classifier = manager
log.success('Classifier loaded')
resolve()
} catch (err) {
this.brain.talk(`${this.brain.wernicke('random_errors')}! ${this.brain.wernicke('errors', 'nlu', { '%error%': err.message })}.`)
this.brain.socket.emit('is-typing', false)
reject({ type: 'error', obj: err })
}
}
})
}
/**
* Classify the query
* and pick-up the right classification
* Classify the query,
* pick-up the right classification
* and extract entities
*/
async process (query) {
log.title('NLU')
@ -66,16 +69,17 @@ class Nlu {
return false
}
const result = this.classifier.classify(query)
const packageName = result.substr(0, result.indexOf(':'))
const moduleName = result.substr(result.indexOf(':') + 1)
const classifications = this.classifier.getClassifications(query)
const result = await this.classifier.process(langs[process.env.LEON_LANG].short, query)
const { intent, score, entities } = result
const packageName = intent.substr(0, intent.indexOf(':'))
const moduleName = intent.substr(intent.indexOf(':') + 1)
let obj = {
query,
entities,
classification: {
package: packageName,
module: moduleName,
confidence: classifications[0].value
confidence: score
}
}
@ -94,7 +98,7 @@ class Nlu {
.catch(() => { /* */ })
}
if (obj.classification.confidence <= 0.5) {
if (intent === 'None') {
const fallback = Nlu.fallback(obj, langs[process.env.LEON_LANG].fallbacks)
if (fallback === false) {
@ -117,7 +121,7 @@ class Nlu {
await this.brain.execute(obj)
} catch (e) {
/* istanbul ignore next */
log[e.type](e.obj.message || e.obj)
log[e.type](e.obj.message)
}
return true

View File

@ -160,12 +160,12 @@ class Server {
}
// Listen for new query
socket.on('query', (data) => {
socket.on('query', async (data) => {
log.title('Socket')
log.info(`${data.client} emitted: ${data.value}`)
socket.emit('is-typing', true)
nlu.process(data.value)
await nlu.process(data.value)
})
// Handle automatic speech recognition

View File

@ -34,11 +34,11 @@
"Sorry, I'm still very young, I didn't got your point"
],
"random_not_sure": [
"Sorry, you may repeat",
"Sorry, you may repeat in an another way",
"Sorry, I'm not sure to understand",
"Sorry, I'm not sure for what you asked, please repeat",
"Sorry, please repeat again",
"Sorry, I didn't correctly clean my ears today! Oh wait, I'm your personal assistant then repeat please"
"Sorry, I'm not sure for what you asked, please repeat with a different way",
"Sorry, please repeat again by formulating differently",
"Sorry, I didn't correctly clean my ears today! Oh wait, I'm your personal assistant then please try again with a new way"
],
"random_not_able": [
"Sorry, I'm not able to answer. I understand what you said, but please repeat in another way",

View File

@ -34,11 +34,11 @@
"Désolé, je suis encore très jeune, je n'ai pas compris votre demande"
],
"random_not_sure": [
"Désolé, vous pouvez répéter",
"Désolé, vous pouvez répéter d'une autre façon",
"Désolé, je ne suis pas sûr de comprendre",
"Désolé, je ne suis pas certain de votre demande, merci de répéter",
"Désolé, merci de répéter à nouveau",
"Désolé, je n'ai pas nettoyé mes oreilles correctement ! Attendez-voir, je suis votre assistant personnel, je vous prie donc de répéter"
"Désolé, je ne suis pas certain de votre demande, merci de répéter d'une manière différente",
"Désolé, merci de répéter à nouveau en formulant différemment",
"Désolé, je n'ai pas nettoyé mes oreilles correctement ! Attendez-voir, je suis votre assistant personnel, je vous prie donc de répéter d'une nouvelle façon"
],
"random_not_able": [
"Désolé, je ne suis pas capable de répondre. J'ai compris ce que vous avez dit, mais je vous prie de répéter d'une autre façon",

View File

@ -0,0 +1,32 @@
{
"lang": "en",
"package": "checker",
"module": "isitdown",
"query": "Check if github.com, mozilla.org and twitter.com are up",
"entities": [
{
"sourceText": "github.com",
"utteranceText": "github.com",
"entity": "url",
"resolution": {
"value": "github.com"
}
},
{
"sourceText": "mozilla.org",
"utteranceText": "mozilla.org",
"entity": "url",
"resolution": {
"value": "mozilla.org"
}
},
{
"sourceText": "twitter.com",
"utteranceText": "twitter.com",
"entity": "url",
"resolution": {
"value": "twitter.com"
}
}
]
}

View File

@ -0,0 +1 @@
{}

View File

@ -8,6 +8,7 @@
"<rootDir>/packages/**/*.spec.js"
],
"setupFiles": [
"jest-canvas-mock",
"<rootDir>/test/paths.setup.js"
],
"setupFilesAfterEnv": [

View File

@ -64,7 +64,7 @@ describe('NLU modules', () => {
// Need to redefine the NLU brain execution to update the mocking
nlu.brain.execute = jest.fn()
nlu.process(exprs[l])
await nlu.process(exprs[l])
const [obj] = nlu.brain.execute.mock.calls
// Execute/test each module one time (otherwise this test would be slow)

View File

@ -3,7 +3,7 @@ global.paths = {
packages: `${__dirname}/../packages`,
server: `${__dirname}/../server/src`,
classifier: `${__dirname}/../server/src/data/expressions/classifier.json`,
broken_classifier: `${__dirname}/assets/broken-classifier.json`,
wave_speech: `${__dirname}/assets/speech-test.wav`,
wave_speech_8: `${__dirname}/assets/speech-8kHz-test.wav`
}

View File

@ -67,6 +67,7 @@ describe('brain', () => {
const obj = {
query: 'Hello',
entities: [],
classification: {
package: 'leon',
module: 'greeting',
@ -85,6 +86,14 @@ describe('brain', () => {
const obj = {
query: 'Is github.com up?',
entities: [{
sourceText: 'github.com',
utteranceText: 'github.com',
entity: 'url',
resolution: {
value: 'github.com'
}
}],
classification: {
package: 'checker',
module: 'isitdown',
@ -103,6 +112,7 @@ describe('brain', () => {
const obj = {
query: 'Hello',
entities: [],
classification: {
package: 'leon',
module: 'greeting',

View File

@ -22,6 +22,17 @@ describe('NLU', () => {
}
})
test('rejects because of a broken classifier', async () => {
const nlu = new Nlu()
nlu.brain = { talk: jest.fn(), wernicke: jest.fn(), socket: { emit: jest.fn() } }
try {
await nlu.loadModel(global.paths.broken_classifier)
} catch (e) {
expect(e.type).toBe('error')
}
})
test('loads the classifier', async () => {
const nlu = new Nlu()
@ -45,7 +56,7 @@ describe('NLU', () => {
nlu.brain = { talk: jest.fn(), wernicke: jest.fn(), socket: { emit: jest.fn() } }
await nlu.loadModel(global.paths.classifier)
expect(await nlu.process('This is a query example to test unknown queries')).toBeFalsy()
expect(await nlu.process('Unknown query')).toBeFalsy()
expect(nlu.brain.talk).toHaveBeenCalledTimes(1)
})
@ -56,7 +67,7 @@ describe('NLU', () => {
Nlu.fallback = jest.fn(() => fallbackObj)
await nlu.loadModel(global.paths.classifier)
expect(nlu.process('This is a query example to test fallbacks')).toBeTruthy()
expect(await nlu.process('Thisisaqueryexampletotestfallbacks')).toBeTruthy()
expect(nlu.brain.execute.mock.calls[0][0]).toBe(fallbackObj)
Nlu.fallback = nluFallbackTmp // Need to give back the real fallback method
})
@ -66,7 +77,7 @@ describe('NLU', () => {
nlu.brain = { execute: jest.fn() }
await nlu.loadModel(global.paths.classifier)
expect(nlu.process('Hello')).toBeTruthy()
expect(await nlu.process('Hello')).toBeTruthy()
expect(nlu.brain.execute).toHaveBeenCalledTimes(1)
})
})

View File

@ -90,7 +90,7 @@ describe('server', () => {
setTimeout(() => {
ee.emit('query', { client: 'jest', value: 'Hello' })
expect(console.log.mock.calls[0][1]).toBe('NLU')
expect(['NLU', 'SOCKET']).toContain(console.log.mock.calls[0][1])
console.log = jest.fn()
ee.emit('recognize', { })

View File

@ -8,6 +8,7 @@
"<rootDir>/test/unit/**/*.spec.js"
],
"setupFiles": [
"jest-canvas-mock",
"<rootDir>/test/paths.setup.js"
],
"setupFilesAfterEnv": [