Mirror of https://github.com/leon-ai/leon.git (synced 2024-12-18 14:21:32 +03:00)
feat(server): expose queries over HTTP

commit b6428d0384 (parent 115f9c1645)

Summary of the change: Nlu.process() now returns a promise and accepts a mute option so a query can run without Leon talking back over the client socket, Brain.execute() resolves with the query id, language, classification, speeches and module execution time, the package routes reuse that resolved data directly, and the Fastify server registers a new POST /core/query route so queries can be sent over HTTP.
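For context, here is a minimal sketch of how the new route could be called once the server is running. The host and port are assumptions (they are configured elsewhere, not in this diff) and the sample query is illustrative; only the POST /core/query path and the { query } body come from this commit. The hunks that follow show the server-side changes.

// Hypothetical caller of the new HTTP endpoint (requires Node 18+ for global fetch)
const postQuery = async (query) => {
  const response = await fetch('http://localhost:1337/core/query', { // host/port assumed
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ query })
  })

  // On success the route replies with { ...data, success: true },
  // on failure with HTTP 500 and { error, success: false }
  return response.json()
}

postQuery('Give me a random number')
  .then((data) => console.log(data.speeches, data.executionTime))
  .catch(console.error)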
@@ -26,10 +26,18 @@ class Brain {
     log.success('New instance')
   }
 
+  get socket () {
+    return this._socket
+  }
+
   set socket (newSocket) {
     this._socket = newSocket
   }
 
+  get tts () {
+    return this._tts
+  }
+
   set tts (newTts) {
     this._tts = newTts
   }
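The setters already existed; this hunk adds the matching getters so other parts of the core (including the new HTTP code path) can read the current socket and TTS instances, not only replace them. A hedged sketch of that symmetry; the connection wiring below is illustrative and not part of this commit:

// Illustrative only: the existing setter swaps in the socket of the connected client...
socketServer.on('connection', (socket) => {
  brain.socket = socket
})

// ...and the new getter lets callers reach it back later, e.g. to signal typing
brain.socket.emit('is-typing', true)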
@@ -259,8 +267,11 @@ class Brain {
         const executionTime = executionTimeEnd - executionTimeStart
 
         resolve({
+          queryId,
+          lang: langs[process.env.LEON_LANG].short,
+          ...obj,
           speeches,
-          executionTime
+          executionTime // In ms, module execution time only
         })
       })
 
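With these extra fields, a successful Brain.execute() resolves with the query identifier, the short language code, the whole NLU object (query, entities, classification) and the collected speeches alongside the module execution time. Roughly, and with illustrative values only (the queryId format, package names and speeches are assumptions), the resolved object looks like this:

// Illustrative shape of the value Brain.execute() resolves with after this change
const exampleExecutionResult = {
  queryId: '1556…',                  // value format assumed
  lang: 'en',
  query: 'Give me a random number',  // spread from obj
  entities: [],
  classification: {
    package: 'leon',                 // package/module/action names assumed
    module: 'randomnumber',
    action: 'run',
    confidence: 0.98
  },
  speeches: ['It is 42.'],           // assumed speech
  executionTime: 310                 // in ms, module execution time only
}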
@@ -55,93 +55,125 @@ class Nlu {
    * pick-up the right classification
    * and extract entities
    */
-  async process (query) {
-    log.title('NLU')
-    log.info('Processing...')
-
-    query = string.ucfirst(query)
-
-    if (Object.keys(this.nlp).length === 0) {
-      this.brain.talk(`${this.brain.wernicke('random_errors')}!`)
-      this.brain.socket.emit('is-typing', false)
-
-      log.error('The NLP model is missing, please rebuild the project or if you are in dev run: npm run train')
-
-      return false
-    }
-
-    const lang = langs[process.env.LEON_LANG].short
-    const result = await this.nlp.process(lang, query)
-
-    const {
-      domain, intent, score
-    } = result
-    const [moduleName, actionName] = intent.split('.')
-    let obj = {
-      query,
-      entities: [],
-      classification: {
-        package: domain,
-        module: moduleName,
-        action: actionName,
-        confidence: score
-      }
-    }
-
-    /* istanbul ignore next */
-    if (process.env.LEON_LOGGER === 'true' && process.env.LEON_NODE_ENV !== 'testing') {
-      this.request
-        .post('https://logger.getleon.ai/v1/expressions')
-        .set('X-Origin', 'leon-core')
-        .send({
-          version,
-          query,
-          lang,
-          classification: obj.classification
-        })
-        .then(() => { /* */ })
-        .catch(() => { /* */ })
-    }
-
-    if (intent === 'None') {
-      const fallback = Nlu.fallback(obj, langs[process.env.LEON_LANG].fallbacks)
-
-      if (fallback === false) {
-        this.brain.talk(`${this.brain.wernicke('random_unknown_queries')}.`, true)
-        this.brain.socket.emit('is-typing', false)
-
-        log.title('NLU')
-        log.warning('Query not found')
-
-        return false
-      }
-
-      obj = fallback
-    }
-
-    log.title('NLU')
-    log.success('Query found')
-
-    try {
-      obj.entities = await this.ner.extractEntities(
-        lang,
-        join(__dirname, '../../../packages', obj.classification.package, `data/expressions/${lang}.json`),
-        obj
-      )
-    } catch (e) /* istanbul ignore next */ {
-      log[e.type](e.obj.message)
-      this.brain.talk(`${this.brain.wernicke(e.code, '', e.data)}!`)
-    }
-
-    try {
-      // Inject action entities with the others if there is
-      await this.brain.execute(obj)
-    } catch (e) /* istanbul ignore next */ {
-      log[e.type](e.obj.message)
-      this.brain.socket.emit('is-typing', false)
-    }
-
-    return true
+  process (query, opts) {
+    const processingTimeStart = Date.now()
+
+    return new Promise(async (resolve, reject) => {
+      log.title('NLU')
+      log.info('Processing...')
+
+      opts = opts || {
+        mute: false // Close Leon mouth e.g. over HTTP
+      }
+      query = string.ucfirst(query)
+
+      if (Object.keys(this.nlp).length === 0) {
+        if (!opts.mute) {
+          this.brain.talk(`${this.brain.wernicke('random_errors')}!`)
+          this.brain.socket.emit('is-typing', false)
+        }
+
+        const msg = 'The NLP model is missing, please rebuild the project or if you are in dev run: npm run train'
+        log.error(msg)
+        return reject(msg)
+      }
+
+      const lang = langs[process.env.LEON_LANG].short
+      const result = await this.nlp.process(lang, query)
+
+      const {
+        domain, intent, score
+      } = result
+      const [moduleName, actionName] = intent.split('.')
+      let obj = {
+        query,
+        entities: [],
+        classification: {
+          package: domain,
+          module: moduleName,
+          action: actionName,
+          confidence: score
+        }
+      }
+
+      /* istanbul ignore next */
+      if (process.env.LEON_LOGGER === 'true' && process.env.LEON_NODE_ENV !== 'testing') {
+        this.request
+          .post('https://logger.getleon.ai/v1/expressions')
+          .set('X-Origin', 'leon-core')
+          .send({
+            version,
+            query,
+            lang,
+            classification: obj.classification
+          })
+          .then(() => { /* */ })
+          .catch(() => { /* */ })
+      }
+
+      if (intent === 'None') {
+        const fallback = Nlu.fallback(obj, langs[process.env.LEON_LANG].fallbacks)
+
+        if (fallback === false) {
+          if (!opts.mute) {
+            this.brain.talk(`${this.brain.wernicke('random_unknown_queries')}.`, true)
+            this.brain.socket.emit('is-typing', false)
+          }
+
+          log.title('NLU')
+          const msg = 'Query not found'
+          log.warning(msg)
+
+          const processingTimeEnd = Date.now()
+          const processingTime = processingTimeEnd - processingTimeStart
+
+          return resolve({
+            processingTime,
+            message: msg
+          })
+        }
+
+        obj = fallback
+      }
+
+      log.title('NLU')
+      log.success('Query found')
+
+      try {
+        obj.entities = await this.ner.extractEntities(
+          lang,
+          join(__dirname, '../../../packages', obj.classification.package, `data/expressions/${lang}.json`),
+          obj
+        )
+      } catch (e) /* istanbul ignore next */ {
+        log[e.type](e.obj.message)
+
+        if (!opts.mute) {
+          this.brain.talk(`${this.brain.wernicke(e.code, '', e.data)}!`)
+        }
+      }
+
+      try {
+        // Inject action entities with the others if there is
+        const data = await this.brain.execute(obj, { mute: opts.mute })
+        const processingTimeEnd = Date.now()
+        const processingTime = processingTimeEnd - processingTimeStart
+
+        return resolve({
+          processingTime, // In ms, total time
+          ...data,
+          nluProcessingTime: processingTime - data.executionTime // In ms, NLU processing time only
+        })
+      } catch (e) /* istanbul ignore next */ {
+        log[e.type](e.obj.message)
+
+        if (!opts.mute) {
+          this.brain.socket.emit('is-typing', false)
+        }
+
+        return reject(e.obj)
+      }
+    })
   }
 
   /**
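process() is now promise-based and mute-aware: the socket code path can keep awaiting it with the defaults, so Leon still talks and emits over the client socket, while the new HTTP path passes { mute: true } and simply consumes the resolved data or the rejection. A hedged sketch of both call sites; the socket event name and payload shape are assumptions, only the nlu.process() signature and the resolved fields come from this diff:

// Socket path (illustrative wiring): defaults to { mute: false }, Leon talks back
socket.on('query', async (data) => {
  await nlu.process(data.value)
})

// HTTP path, as used by the new /core/query route
const handleHttpQuery = async (query) => {
  const data = await nlu.process(query, { mute: true })

  console.log(data.processingTime)      // total time, in ms
  console.log(data.nluProcessingTime)   // NLU processing time only, in ms
  console.log(data.executionTime)       // module execution time, in ms

  return data
}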
@@ -82,18 +82,14 @@ const generatePackagesRoutes = () => {
         package: pkg,
         module,
         action,
-        execution_time: 0, // ms
         speeches: []
       }
 
       try {
-        const { speeches, executionTime } = await brain.execute(obj, { mute: true })
+        const data = await brain.execute(obj, { mute: true })
 
         reply.send({
-          ...responseData,
-          entities,
-          speeches,
-          execution_time: executionTime,
+          ...data,
           success: true
         })
       } catch (e) /* istanbul ignore next */ {
@@ -102,7 +98,7 @@ const generatePackagesRoutes = () => {
         reply.send({
           ...responseData,
           speeches: e.speeches,
-          execution_time: e.executionTime,
+          executionTime: e.executionTime,
           error: e.obj.message,
           success: false
         })
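Since brain.execute() now resolves with everything the route used to assemble by hand, the handler spreads data into the reply and drops the duplicated entities, speeches and snake_case execution_time fields; the error path is renamed to the same camelCase executionTime. With illustrative values (the error text and timing are assumptions), the two reply bodies look roughly like this:

// Illustrative reply bodies for a package route after this change
const successBody = {
  // ...data resolved by brain.execute(obj, { mute: true }), i.e. the same shape
  // shown for Brain.execute() above (queryId, lang, query, entities,
  // classification, speeches, executionTime), plus:
  success: true
}

const errorBody = {
  // ...responseData (package, module, action, speeches placeholder), plus:
  speeches: [],                    // e.speeches
  executionTime: 45,               // e.executionTime, camelCase now, in ms
  error: 'Something went wrong',   // e.obj.message, assumed text
  success: false
}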
@@ -222,13 +218,32 @@ const bootstrap = async () => {
     root: join(__dirname, '..', '..', '..', 'app', 'dist'),
     prefix: '/'
   })
-  fastify.get('/', (_request, reply) => {
+  fastify.get('/', (request, reply) => {
     reply.sendFile('index.html')
   })
 
   fastify.register(infoPlugin, { apiVersion })
   fastify.register(downloadsPlugin, { apiVersion })
 
+  fastify.post('/core/query', async (request, reply) => {
+    const { query } = request.body
+
+    try {
+      const data = await nlu.process(query, { mute: true })
+
+      reply.send({
+        ...data,
+        success: true
+      })
+    } catch (e) {
+      reply.statusCode = 500
+      reply.send({
+        error: e.message,
+        success: false
+      })
+    }
+  })
+
   if (process.env.PACKAGES_OVER_HTTP === 'true') {
     generatePackagesRoutes()
   }
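Because the handler is registered on the same Fastify instance as the rest of the API, it can also be exercised without opening a port through Fastify's inject() helper. The sketch below is an assumption about how one might check the route and is not part of this commit:

// Hedged test sketch using Fastify's built-in injection (not part of this commit)
const checkCoreQueryRoute = async () => {
  const response = await fastify.inject({
    method: 'POST',
    url: '/core/query',
    payload: { query: 'Give me a random number' }
  })
  const body = JSON.parse(response.payload)

  console.log(response.statusCode)          // 200 on success, 500 on failure
  console.log(body.success, body.speeches)
}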